From 2ae3f08d8b47d23edfbf8fa04f512739ef09a458 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 3 Nov 2025 13:41:24 +0000 Subject: [PATCH 01/98] Update C-Based-Application-from-Scratch.md --- Projects/Projects/C-Based-Application-from-Scratch.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Projects/Projects/C-Based-Application-from-Scratch.md b/Projects/Projects/C-Based-Application-from-Scratch.md index a90c0f3a..06816d0f 100644 --- a/Projects/Projects/C-Based-Application-from-Scratch.md +++ b/Projects/Projects/C-Based-Application-from-Scratch.md @@ -31,7 +31,7 @@ Modern, higher-level managed languages such as Java and Python enabling develope This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files. Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count. -Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This is excluding those provided by the C standard library. You are also free to use any suitable compiler. If you use a difference language or a dependency written in another language, please include a short jusification in your submission. +Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This is excluding those provided by the C standard library. You are also free to use any suitable compiler. If you use a difference language or a dependency written in another language, please include a short justification in your submission. Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions. 
@@ -60,3 +60,4 @@ To receive the benefits, you must show us your project through our [online form] ### Previous Submissions 1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git). 2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game). + From 4948fc1e447fb1ea0e003e1cb1143a18e8257898 Mon Sep 17 00:00:00 2001 From: ci-bot Date: Mon, 3 Nov 2025 13:41:57 +0000 Subject: [PATCH 02/98] docs: auto-update --- docs/_data/navigation.yml | 256 +++++++++--------- ...-07-11-C-Based-Application-from-Scratch.md | 4 +- 2 files changed, 131 insertions(+), 129 deletions(-) diff --git a/docs/_data/navigation.yml b/docs/_data/navigation.yml index cb6ca0ef..dd41fe3d 100644 --- a/docs/_data/navigation.yml +++ b/docs/_data/navigation.yml @@ -5,80 +5,38 @@ header: projects: - title: Projects children: - - title: C-Based-Application-from-Scratch - description: This self-service project goes back to the fundamentals. The challenge - is to develop an application of your choice but your are only permitted to use - the C language with as few dependencies as possible. - url: /2025/07/11/C-Based-Application-from-Scratch.html - subjects: - - Performance and Architecture - - Libraries - platform: - - IoT - sw-hw: - - Software - support-level: - - Self-Service - - Arm Ambassador Support - status: - - Published - - title: SpecINT2017-benchmarking-on-Arm64 - description: "This self-service project profiles SPEC CPU2017 on Arm64 servers\u2014\ - using GCC, Clang, and Arm Compiler with top-down analysis\u2014to reveal how\ - \ compiler choices and Arm micro-architectural features impact execution time,\ - \ energy efficiency, and performance bottlenecks." 
- url: /2025/05/30/SpecINT2017-benchmarking-on-Arm64.html + - title: Machine-Learning-on-AWS-Graviton + description: "This self-service project ports and tunes OpenSora text-to-video\ + \ transformers on AWS Graviton CPUs\u2014showcasing cost-efficient, quantized,\ + \ CPU-only inference pipelines and guiding best-practice optimization for Arm-based\ + \ cloud AI workloads." + url: /2025/05/30/Machine-Learning-on-AWS-Graviton.html subjects: - - Performance and Architecture + - ML - Migration to Arm + - Performance and Architecture platform: - Servers and Cloud Computing - - Laptops and Desktops - AI sw-hw: - Software - - Hardware support-level: - Self-Service - Arm Ambassador Support status: - Published - - title: Haskell-Compiler-Windows-on-Arm - description: "This self-service project brings native Glasgow Haskell Compiler\ - \ support to Windows on Arm\u2014unlocking efficient Arm-laptop builds, extending\ - \ Haskell\u2019s reach, and giving contributors hands-on experience with Arm64\ - \ code generation and runtime integration." - url: /2025/05/30/Haskell-Compiler-Windows-on-Arm.html + - title: R-Arm-Community-Support + description: "This self-service project boosts the R ecosystem on Windows on Arm\ + \ by identifying unsupported packages, upstreaming fixes, and automating builds\u2014\ + so data scientists can run their workflows natively on fast, efficient Arm64\ + \ laptops and desktops." 
+ url: /2025/05/30/R-Arm-Community-Support.html subjects: - - Migration to Arm - Performance and Architecture - platform: - - Servers and Cloud Computing - - Laptops and Desktops - sw-hw: - - Software - - Hardware - support-level: - - Self-Service - - Arm Ambassador Support - status: - - Published - - title: AI-Powered-Porting-Tool - description: "This self-service project creates an AI-driven porting engine that\ - \ analyzes package dependencies, auto-generates fixes, and submits pull requests\u2014\ - accelerating native macOS and Windows-on-Arm support for bioinformatics and\ - \ R software so researchers can run demanding workflows directly on modern Arm\ - \ devices." - url: /2025/05/30/AI-Powered-Porting-Tool.html - subjects: - - CI-CD - - ML - Migration to Arm + - Libraries platform: - - Servers and Cloud Computing - Laptops and Desktops - - Mobile, Graphics, and Gaming - - AI sw-hw: - Software support-level: @@ -105,42 +63,38 @@ projects: - Arm Ambassador Support status: - Published - - title: Architecture-Insight-Dashboard - description: "This self-service project develops a data-rich dashboard that visualizes\ - \ the popularity of Arm CPU/OS combinations and pinpoints software-stack support\ - \ for specific extensions\u2014giving developers an instant, validated view\ - \ of where their workloads will run best." - url: /2025/05/30/Architecture-Insight-Dashboard.html + - title: Haskell-Compiler-Windows-on-Arm + description: "This self-service project brings native Glasgow Haskell Compiler\ + \ support to Windows on Arm\u2014unlocking efficient Arm-laptop builds, extending\ + \ Haskell\u2019s reach, and giving contributors hands-on experience with Arm64\ + \ code generation and runtime integration." 
+ url: /2025/05/30/Haskell-Compiler-Windows-on-Arm.html subjects: + - Migration to Arm - Performance and Architecture - - Web platform: - Servers and Cloud Computing - Laptops and Desktops - - Mobile, Graphics, and Gaming - - AI sw-hw: - Software + - Hardware support-level: - Self-Service - Arm Ambassador Support status: - Published - - title: Arduino-IDE-Windows-on-Arm - description: "This self-service project ports and optimizes the Arduino IDE\u2014\ - patching its lzma-native dependency\u2014to run natively and efficiently on\ - \ Windows on Arm, giving developers hands-on experience with cross-platform\ - \ builds, Arm64 performance tuning, and upstream open-source contributions." - url: /2025/05/30/Arduino-IDE-Windows-on-Arm.html + - title: C-Based-Application-from-Scratch + description: This self-service project goes back to the fundamentals. The challenge + is to develop an application of your choice but your are only permitted to use + the C language with as few dependencies as possible. + url: /2025/07/11/C-Based-Application-from-Scratch.html subjects: - Performance and Architecture - - Migration to Arm - Libraries platform: - - Laptops and Desktops + - IoT sw-hw: - Software - - Hardware support-level: - Self-Service - Arm Ambassador Support @@ -165,16 +119,21 @@ projects: - Arm Ambassador Support status: - Published - - title: Responsible-AI-and-Yellow-Teaming - description: "This self-service project equips teams with a YellowTeamGPT workflow\ - \ that probes Arm-based AI products for unintended impacts\u2014turning responsible-AI\ - \ stress-testing into a core step of the development cycle." 
- url: /2025/05/30/Responsible-AI-and-Yellow-Teaming.html + - title: AI-Powered-Porting-Tool + description: "This self-service project creates an AI-driven porting engine that\ + \ analyzes package dependencies, auto-generates fixes, and submits pull requests\u2014\ + accelerating native macOS and Windows-on-Arm support for bioinformatics and\ + \ R software so researchers can run demanding workflows directly on modern Arm\ + \ devices." + url: /2025/05/30/AI-Powered-Porting-Tool.html subjects: + - CI-CD - ML + - Migration to Arm platform: - Servers and Cloud Computing - Laptops and Desktops + - Mobile, Graphics, and Gaming - AI sw-hw: - Software @@ -183,18 +142,19 @@ projects: - Arm Ambassador Support status: - Published - - title: Machine-Learning-on-AWS-Graviton - description: "This self-service project ports and tunes OpenSora text-to-video\ - \ transformers on AWS Graviton CPUs\u2014showcasing cost-efficient, quantized,\ - \ CPU-only inference pipelines and guiding best-practice optimization for Arm-based\ - \ cloud AI workloads." - url: /2025/05/30/Machine-Learning-on-AWS-Graviton.html + - title: Architecture-Insight-Dashboard + description: "This self-service project develops a data-rich dashboard that visualizes\ + \ the popularity of Arm CPU/OS combinations and pinpoints software-stack support\ + \ for specific extensions\u2014giving developers an instant, validated view\ + \ of where their workloads will run best." 
+ url: /2025/05/30/Architecture-Insight-Dashboard.html subjects: - - ML - - Migration to Arm - Performance and Architecture + - Web platform: - Servers and Cloud Computing + - Laptops and Desktops + - Mobile, Graphics, and Gaming - AI sw-hw: - Software @@ -203,22 +163,40 @@ projects: - Arm Ambassador Support status: - Published - - title: Academic-Trends-Dashboard - description: "This self-service project creates a web-scraping, database-driven\ - \ dashboard that visualizes how computer-science research topics shift over\ - \ time\u2014helping Arm partners and chip architects align future hardware designs\ - \ with emerging algorithmic trends." - url: /2025/05/30/Academic-Trends-Dashboard.html + - title: Responsible-AI-and-Yellow-Teaming + description: "This self-service project equips teams with a YellowTeamGPT workflow\ + \ that probes Arm-based AI products for unintended impacts\u2014turning responsible-AI\ + \ stress-testing into a core step of the development cycle." + url: /2025/05/30/Responsible-AI-and-Yellow-Teaming.html subjects: - - Web - - Databases + - ML + platform: + - Servers and Cloud Computing + - Laptops and Desktops + - AI + sw-hw: + - Software + support-level: + - Self-Service + - Arm Ambassador Support + status: + - Published + - title: SpecINT2017-benchmarking-on-Arm64 + description: "This self-service project profiles SPEC CPU2017 on Arm64 servers\u2014\ + using GCC, Clang, and Arm Compiler with top-down analysis\u2014to reveal how\ + \ compiler choices and Arm micro-architectural features impact execution time,\ + \ energy efficiency, and performance bottlenecks." 
+ url: /2025/05/30/SpecINT2017-benchmarking-on-Arm64.html + subjects: + - Performance and Architecture + - Migration to Arm platform: - Servers and Cloud Computing - Laptops and Desktops - - Mobile, Graphics, and Gaming - AI sw-hw: - Software + - Hardware support-level: - Self-Service - Arm Ambassador Support @@ -263,17 +241,39 @@ projects: - Arm Ambassador Support status: - Published - - title: HPC-Algorithm - description: "This self-service project is around finding a HPC algorithm and\ - \ accelerating it with Arm\u2019s SVE/SVE2 vectorization\u2014demonstrating\ - \ how next-generation Arm hardware can deliver significant, scalable performance\ - \ gains." - url: /2025/05/30/HPC-Algorithm.html + - title: Arduino-IDE-Windows-on-Arm + description: "This self-service project ports and optimizes the Arduino IDE\u2014\ + patching its lzma-native dependency\u2014to run natively and efficiently on\ + \ Windows on Arm, giving developers hands-on experience with cross-platform\ + \ builds, Arm64 performance tuning, and upstream open-source contributions." + url: /2025/05/30/Arduino-IDE-Windows-on-Arm.html subjects: - Performance and Architecture + - Migration to Arm + - Libraries + platform: + - Laptops and Desktops + sw-hw: + - Software + - Hardware + support-level: + - Self-Service + - Arm Ambassador Support + status: + - Published + - title: Academic-Trends-Dashboard + description: "This self-service project creates a web-scraping, database-driven\ + \ dashboard that visualizes how computer-science research topics shift over\ + \ time\u2014helping Arm partners and chip architects align future hardware designs\ + \ with emerging algorithmic trends." 
+ url: /2025/05/30/Academic-Trends-Dashboard.html + subjects: + - Web + - Databases platform: - Servers and Cloud Computing - Laptops and Desktops + - Mobile, Graphics, and Gaming - AI sw-hw: - Software @@ -282,18 +282,18 @@ projects: - Arm Ambassador Support status: - Published - - title: R-Arm-Community-Support - description: "This self-service project boosts the R ecosystem on Windows on Arm\ - \ by identifying unsupported packages, upstreaming fixes, and automating builds\u2014\ - so data scientists can run their workflows natively on fast, efficient Arm64\ - \ laptops and desktops." - url: /2025/05/30/R-Arm-Community-Support.html + - title: HPC-Algorithm + description: "This self-service project is around finding a HPC algorithm and\ + \ accelerating it with Arm\u2019s SVE/SVE2 vectorization\u2014demonstrating\ + \ how next-generation Arm hardware can deliver significant, scalable performance\ + \ gains." + url: /2025/05/30/HPC-Algorithm.html subjects: - Performance and Architecture - - Migration to Arm - - Libraries platform: + - Servers and Cloud Computing - Laptops and Desktops + - AI sw-hw: - Software support-level: @@ -303,17 +303,18 @@ projects: - Published - title: Extended Team Projects children: - - title: Human-Centric-Robotics - description: "This team project will build and test an Arm-based urban service\ - \ robot\u2014merging real-time navigation, vision-guided manipulation, and human\ - \ interaction\u2014and model its socioeconomic impact to show how Arm platforms\ - \ can transform last-mile delivery, eldercare, or other city services." - url: /2025/05/30/Human-Centric-Robotics.html + - title: Compliance-Ready-Smart-Camera-System + description: "This challenge will create and validate an Arm-based, smart camera\ + \ pipeline on virtual automotive hardware\u2014advancing safer, more developer-friendly\ + \ driver-monitoring solutions for next-generation vehicles." 
+ url: /2025/05/30/Compliance-Ready-Smart-Camera-System.html subjects: - - ML + - Security - Embedded Linux - - RTOS Fundamentals + - ML + - Virtual Hardware platform: + - Mobile, Graphics, and Gaming - Automotive - IoT - Embedded and Microcontrollers @@ -325,18 +326,17 @@ projects: - Self-Service - Arm Ambassador Support - Direct Support from Arm - - title: Compliance-Ready-Smart-Camera-System - description: "This challenge will create and validate an Arm-based, smart camera\ - \ pipeline on virtual automotive hardware\u2014advancing safer, more developer-friendly\ - \ driver-monitoring solutions for next-generation vehicles." - url: /2025/05/30/Compliance-Ready-Smart-Camera-System.html + - title: Human-Centric-Robotics + description: "This team project will build and test an Arm-based urban service\ + \ robot\u2014merging real-time navigation, vision-guided manipulation, and human\ + \ interaction\u2014and model its socioeconomic impact to show how Arm platforms\ + \ can transform last-mile delivery, eldercare, or other city services." + url: /2025/05/30/Human-Centric-Robotics.html subjects: - - Security - - Embedded Linux - ML - - Virtual Hardware + - Embedded Linux + - RTOS Fundamentals platform: - - Mobile, Graphics, and Gaming - Automotive - IoT - Embedded and Microcontrollers diff --git a/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md b/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md index 2042971f..6b762fee 100644 --- a/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md +++ b/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md @@ -60,6 +60,7 @@ full_description: |- ### Previous Submissions 1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git). + 2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game). 
--- ## Description @@ -98,4 +99,5 @@ Standout project contributions will result in preferential internal referrals to To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ### Previous Submissions -1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git). \ No newline at end of file +1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git). +2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game). \ No newline at end of file From fecf8d1ab5adbb4501bd6dbde36bf2e23e5f5db2 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 20 Nov 2025 11:17:37 +0000 Subject: [PATCH 03/98] Revise AI agent project details and prerequisites Updated project metadata and added badges for visibility. Enhanced project description and prerequisites for clarity. --- Projects/Projects/AI-Agents.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Projects/Projects/AI-Agents.md b/Projects/Projects/AI-Agents.md index 8bab6c46..40ce57cf 100644 --- a/Projects/Projects/AI-Agents.md +++ b/Projects/Projects/AI-Agents.md @@ -19,6 +19,7 @@ license: status: - "Published" donation: +badges: [trending] --- @@ -63,4 +64,5 @@ To receive the benefits, you must show us your project through our [online form] 2. [AI that interprets user requests, generates circuit descriptions, creates LTSpice ASC code, and iteratively refines circuit designs using a combination of GPT-based language models, a vision analysis module, and LTSpice simulation. (Gijeong Lee, Bill Leoutsakos)](https://github.com/BillLeoutsakosvl346/ElectroNinjaRefined) -3. 
[AI agent to track real-time student engagement and exam performance (Jasper Wang, Sritej Tummuru, Talha Javed)](https://github.com/JasperWANG-911/AI_Agent) \ No newline at end of file + +3. [AI agent to track real-time student engagement and exam performance (Jasper Wang, Sritej Tummuru, Talha Javed)](https://github.com/JasperWANG-911/AI_Agent) From 8c782be457d8c98177b047aa9a5c97fad73a40ed Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 20 Nov 2025 13:00:07 +0000 Subject: [PATCH 04/98] Refactor update_docs.py with new structure and helpers Refactor update_docs.py for improved organization and functionality. --- scripts/update_docs.py | 330 +++++++++++++++++++++++------------------ 1 file changed, 188 insertions(+), 142 deletions(-) diff --git a/scripts/update_docs.py b/scripts/update_docs.py index 5ac4822b..09015906 100644 --- a/scripts/update_docs.py +++ b/scripts/update_docs.py @@ -1,23 +1,32 @@ import os -import sys import re import shutil from pathlib import Path -import frontmatter from datetime import datetime -import ruamel.yaml - -projects_dir = "../Projects/Projects" -extended_projects_dir = "../Projects/Extended-Team-Projects" -projects_pathlist = [Path("../Projects/projects.md")] -projects_projects_pathlist = Path(projects_dir).rglob("*.md") -projects_extended_project_pathlist = Path(extended_projects_dir).rglob("*.md") -research_pathlist = [Path("../Research/research.md")] - -docs_posts_dir = "../docs/_posts" - -index_frontmatter = """--- +import frontmatter +import ruamel.yaml +from ruamel.yaml.scalarstring import LiteralScalarString + +# ---------------------------- +# Paths (run this script from /scripts or project root) +# ---------------------------- +ROOT = Path(__file__).resolve().parent +REPO_ROOT = ROOT.parent +DOCS_DIR = REPO_ROOT / "docs" +DOCS_POSTS_DIR = DOCS_DIR / "_posts" +DOCS_IMAGES_DIR = DOCS_DIR / "images" +DOCS_CONFIG = DOCS_DIR / "_config.yml" + +PROJECTS_DIR = REPO_ROOT / "Projects" / "Projects" +EXT_PROJECTS_DIR = 
REPO_ROOT / "Projects" / "Extended-Team-Projects" + +PROJECTS_PATHLIST = [REPO_ROOT / "Projects" / "projects.md"] +PROJECTS_PROJECTS_PATHLIST = PROJECTS_DIR.rglob("*.md") +PROJECTS_EXTENDED_PATHLIST = EXT_PROJECTS_DIR.rglob("*.md") +RESEARCH_PATHLIST = [REPO_ROOT / "Research" / "research.md"] + +INDEX_FRONTMATTER = """--- title: Academic Projects Repository tags: TeXt article_header: @@ -27,46 +36,118 @@ --- """ +# ---------------------------- +# Config helpers +# ---------------------------- +def load_baseurl() -> str: + """ + Reads baseurl from docs/_config.yml. + Falls back to '/DLFIXES' if not present (your site is https://v3x-0e.github.io/DLFIXES/). + Ensures it starts with '/' and does not end with '/' (Jekyll convention). + """ + default_baseurl = "/DLFIXES" + if not DOCS_CONFIG.exists(): + return default_baseurl + + try: + import yaml as pyyaml + cfg = pyyaml.safe_load(DOCS_CONFIG.read_text(encoding="utf-8")) or {} + baseurl = cfg.get("baseurl", "") or default_baseurl + if not baseurl.startswith("/"): + baseurl = "/" + baseurl + baseurl = baseurl.rstrip("/") + return baseurl + except Exception: + return default_baseurl + + +BASEURL = load_baseurl() + +# ---------------------------- +# Utilities +# ---------------------------- def clean(): - clean_lst = [docs_posts_dir] - for dirpath in clean_lst: - if os.path.exists(dirpath) and os.path.isdir(dirpath): - shutil.rmtree(dirpath) - os.makedirs(dirpath) - else: - os.makedirs(dirpath) + DOCS_POSTS_DIR.mkdir(parents=True, exist_ok=True) + # wipe posts dir + shutil.rmtree(DOCS_POSTS_DIR, ignore_errors=True) + DOCS_POSTS_DIR.mkdir(parents=True, exist_ok=True) +def slugify(filename: str) -> str: + """ + Build a URL-safe slug from the filename (without extension). + Lowercase, replace non [a-z0-9-] with '-'. 
+ """ + stem = Path(filename).stem + slug = re.sub(r'[^a-z0-9\-]+', '-', stem.lower()).strip('-') + return slug or "post" + +def norm_date(meta) -> str: + """ + Accepts: + - front matter key 'date' or 'publication-date' + - datetime or string + Returns: 'YYYY-MM-DD' + """ + if meta is None: + return None + if isinstance(meta, datetime): + return meta.strftime("%Y-%m-%d") + s = str(meta) + try: + # Handles 'YYYY-MM-DD' and full ISO timestamps + return datetime.fromisoformat(s).strftime("%Y-%m-%d") + except ValueError: + # Fallback: first 10 chars + return s[:10] + +# ---------------------------- +# Content transforms +# ---------------------------- def convert_md_images_to_html(md_text: str, doc_path: Path) -> str: + """ + - Finds Markdown images ![alt](path) + - Copies each image into docs/images/ + - Rewrites to + - Skips the README banner ./images/DeveloperLabs_Header.png entirely. + """ pattern = r'!\[[^\]]*\]\(([^)]+)\)' - docs_dir_path = Path("../docs") def replace(match): - img_path = match.group(1) - - # Skip certain banner images entirely - if doc_path.resolve() == Path("../README.md").resolve() and img_path == "./images/DeveloperLabs_Header.png": + img_path = match.group(1).strip() + # Skip a specific header if converting README.md + if doc_path.resolve() == (REPO_ROOT / "README.md").resolve() and img_path == "./images/DeveloperLabs_Header.png": return "" source_path = (doc_path.parent / img_path).resolve() - target_folder = (docs_dir_path / "images").resolve() - target_folder.mkdir(parents=True, exist_ok=True) + DOCS_IMAGES_DIR.mkdir(parents=True, exist_ok=True) if source_path.is_file(): - shutil.copy2(source_path, target_folder) + try: + shutil.copy2(source_path, DOCS_IMAGES_DIR) + except Exception as e: + print(f"Warning: could not copy {source_path} -> {DOCS_IMAGES_DIR}: {e}") else: - print(f"Warning: {source_path} does not exist in {doc_path}!") + print(f"Warning: {source_path} does not exist (referenced in {doc_path})") - new_img_path = 
f"/Arm-Developer-Labs/images/{Path(img_path).name}" + fname = Path(img_path).name + new_img_path = f"{BASEURL}/images/{fname}" - if "ACA_badge.jpg" in new_img_path: - return f'' - return f'' + # Special size override + if "ACA_badge.jpg" in fname: + return f'' + return f'' return re.sub(pattern, replace, md_text) def convert_md(md_text: str) -> str: + """ + Specific content replacements: + - Replace 'Developer Labs Website' link with 'Developer Labs Repository' + - Replace a specific YouTube thumbnail link with an ' ) - replaced_md = md_text - if pattern_youtube in replaced_md: - replaced_md = replaced_md.replace(pattern_youtube, replacement_youtube) - if pattern_link in replaced_md: - replaced_md = replaced_md.replace(pattern_link, replacement_link) - - return replaced_md + replaced = md_text + if pattern_youtube in replaced: + replaced = replaced.replace(pattern_youtube, replacement_youtube) + if pattern_link in replaced: + replaced = replaced.replace(pattern_link, replacement_link) + return replaced - -def format_content(pathlist, docs_path): +# ---------------------------- +# Main formatter +# ---------------------------- +def write_post_from_path(path: Path, out_dir: Path): """ - For each Path in pathlist: - - Load frontmatter metadata. - - Extract the 'date' field from metadata (expected YYYY-MM-DD or datetime). - - Use that date plus the filename-slug to name the output: "-.md". - - Convert images and special markdown embeds, then write to docs_path. 
+ Read a markdown file with front matter, normalize metadata, and emit + a Jekyll post into out_dir with name YYYY-MM-DD-.md """ - for path in pathlist: - path = Path(path) + path = Path(path) - if path.name == "README.md": - continue + if path.name == "README.md": + return - raw_text = path.read_text(encoding="utf-8") - post = frontmatter.loads(raw_text) + raw_text = path.read_text(encoding="utf-8") + post = frontmatter.loads(raw_text) + + # Prefer 'date', then 'publication-date', else file mtime + stat = path.stat() + date_meta = post.metadata.get("date") or post.metadata.get("publication-date") + date_str = normalize_date(date_meta, stat.st_mtime) + + slug = slugify(path.name) + + # Force layout to "article" + post.metadata["layout"] = "article" + + # Only set sidebar nav if it's a project-level file (not the top-level projects.md) + if path.name != "projects.md": + sidebar = post.metadata.get("sidebar") or {} + sidebar.setdefault("nav", "projects") + post.metadata["sidebar"] = sidebar + + # Optional: store full_description for templates that need it + post.metadata["full_description"] = post.content + + # YAML writing with ruamel (preserve quotes; literal scalars for multiline strings) + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + yaml.width = 4096 + + metadata_copy = dict(post.metadata) + for key, value in list(metadata_copy.items()): + if isinstance(value, str) and "\n" in value: + metadata_copy[key] = LiteralScalarString(value) + + stream = StringIO() + yaml.dump(metadata_copy, stream) + yaml_content = stream.getvalue() + + # Build full content: front matter + original content + formatted = f"---\n{yaml_content}---\n{post.content}" + + # Apply markdown-specific conversions and image rewrite + formatted = convert_md(formatted) + formatted = convert_md_images_to_html(formatted, path) + + new_filename = f"{date_str}-{slug}.md" + out_path = out_dir / new_filename + out_path.write_text(formatted, encoding="utf-8") + print(f"[OK] Wrote 
{out_path.relative_to(REPO_ROOT)}") - # If there's a 'date' key in frontmatter, normalize it to "YYYY-MM-DD" - date_meta = post.metadata.get("publication-date") - if date_meta is None: - # If there's no date, fallback to file's modified date - timestamp = path.stat().st_mtime - date_str = datetime.fromtimestamp(timestamp).strftime("%Y-%m-%d") - elif isinstance(date_meta, datetime): - date_str = date_meta.strftime("%Y-%m-%d") - else: - # If it's already a string, trust it but ensure formatting - try: - parsed = datetime.fromisoformat(str(date_meta)) - date_str = parsed.strftime("%Y-%m-%d") - except ValueError: - # If it isn't ISO, just take the first 10 chars - date_str = str(date_meta)[:10] - - # Build a slug from the filename (without .md) - filename = path.name # e.g. "my-project.md" - slug = filename.removesuffix(".md") - - # For certain top-level markdowns ("projects.md", "research.md"), - # we assign a special article_header override: - # if path.name in ["projects.md", "research.md"]: - # post.metadata["article_header"] = { - # "type": "cover", - # "image": { - # "src": "/images/DeveloperLabs_Header.png", - # }, - # } - - # Always set layout to "article" - post.metadata["layout"] = "article" - - # Only set sidebar nav if it's a project‐level file (not the top-level README) - if path.name != "projects.md": - post.metadata["sidebar"] = {"nav": "projects"} - - data = {"full_description": post.content} - post.metadata.update(data) - - # Use ruamel.yaml for proper literal block scalar formatting - yaml = ruamel.yaml.YAML() - yaml.preserve_quotes = True - yaml.width = 4096 - - # Create the frontmatter manually to ensure literal block scalars - metadata_copy = post.metadata.copy() - - # Convert multiline strings to literal scalars - for key, value in metadata_copy.items(): - if isinstance(value, str) and '\n' in value: - metadata_copy[key] = ruamel.yaml.scalarstring.LiteralScalarString(value) - - # Manually construct the frontmatter - from io import StringIO - 
stream = StringIO() - yaml.dump(metadata_copy, stream) - yaml_content = stream.getvalue() - - # Build the full content with frontmatter - formatted_content = f"---\n{yaml_content}---\n{post.content}" - - # Convert Markdown image embeds → HTML and copy assets - converted_content = convert_md_images_to_html( - formatted_content, - path - ) - - # Build the new filename: "-.md" - new_filename = f"{date_str}-{slug}.md" - out_file = Path(docs_path, new_filename) - out_file.write_text(converted_content, encoding="utf-8") def format_index(): - src = "../README.md" - docs_path = "../docs" - with open(src, 'r', encoding='utf-8') as f: - # Prepend our custom frontmatter, then convert images/embeds - combined = index_frontmatter + f.read() - formatted_content = convert_md(combined) - converted_content = convert_md_images_to_html( - formatted_content, - Path(src) - ) - out_file = os.path.join(docs_path, "index.md") - with open(out_file, 'w', encoding='utf-8') as out_f: - out_f.write(converted_content) + """ + Build docs/index.md from README.md + custom front matter, + with markdown conversions and image handling. 
+ """ + src = REPO_ROOT / "README.md" + combined = INDEX_FRONTMATTER + src.read_text(encoding="utf-8") + combined = convert_md(combined) + combined = convert_md_images_to_html(combined, src) + + out_file = DOCS_DIR / "index.md" + out_file.write_text(combined, encoding="utf-8") + print(f"[OK] Wrote {out_file.relative_to(REPO_ROOT)}") + def main(): + print(f"[INFO] Using baseurl: {BASEURL}") clean() format_index() - format_content(projects_pathlist, docs_posts_dir) - format_content(projects_projects_pathlist, docs_posts_dir) - format_content(projects_extended_project_pathlist, docs_posts_dir) + + # Explicit list: projects.md, then actual project dirs + for p in PROJECTS_PATHLIST: + if Path(p).exists(): + write_post_from_path(Path(p), DOCS_POSTS_DIR) + + for p in PROJECTS_PROJECTS_PATHLIST: + write_post_from_path(Path(p), DOCS_POSTS_DIR) + + for p in PROJECTS_EXTENDED_PATHLIST: + write_post_from_path(Path(p), DOCS_POSTS_DIR) + + # If you ever want research posts too, uncomment: + # for p in RESEARCH_PATHLIST: + # if Path(p).exists(): + # write_post_from_path(Path(p), DOCS_POSTS_DIR) + if __name__ == "__main__": main() From 7f7b567519a4fcd4948184e0074f61f643ea29b4 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Wed, 3 Dec 2025 17:13:08 +0000 Subject: [PATCH 19/98] Update AI-Agents.md with project details changing how front matter is displayed --- Projects/Projects/AI-Agents.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Projects/Projects/AI-Agents.md b/Projects/Projects/AI-Agents.md index 40ce57cf..d026e463 100644 --- a/Projects/Projects/AI-Agents.md +++ b/Projects/Projects/AI-Agents.md @@ -19,7 +19,7 @@ license: status: - "Published" donation: -badges: [trending] +badges: trending --- @@ -66,3 +66,4 @@ To receive the benefits, you must show us your project through our [online form] 3. 
[AI agent to track real-time student engagement and exam performance (Jasper Wang, Sritej Tummuru, Talha Javed)](https://github.com/JasperWANG-911/AI_Agent) + From 4ae7371bff452adf8a18619f9d3a539dec0da036 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Wed, 3 Dec 2025 17:16:16 +0000 Subject: [PATCH 20/98] Add 'new' badge to navigation.yml --- docs/_data/navigation.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/_data/navigation.yml b/docs/_data/navigation.yml index 130e34df..6adb68b8 100644 --- a/docs/_data/navigation.yml +++ b/docs/_data/navigation.yml @@ -46,6 +46,7 @@ projects: - Published badges: - trending + - new - title: AI-Agents description: "This self-service project builds a sandboxed AI agent on Arm hardware\ \ that harnesses appropriately sized LLMs to safely automate complex workflows\u2014\ From 4a09226b851ef96e25501dc190799aaf6a8e3c76 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Wed, 3 Dec 2025 17:18:53 +0000 Subject: [PATCH 21/98] Remove 'new' badge from AI-Agents navigation Removed 'new' badge from the AI-Agents section. 
--- docs/_data/navigation.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/_data/navigation.yml b/docs/_data/navigation.yml index 6adb68b8..130e34df 100644 --- a/docs/_data/navigation.yml +++ b/docs/_data/navigation.yml @@ -46,7 +46,6 @@ projects: - Published badges: - trending - - new - title: AI-Agents description: "This self-service project builds a sandboxed AI agent on Arm hardware\ \ that harnesses appropriately sized LLMs to safely automate complex workflows\u2014\ From 92a59b80eef4abd483976c3d5714ef46c47089a1 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 4 Dec 2025 10:07:00 +0000 Subject: [PATCH 22/98] Add files via upload --- docs/assets/badges/RA.svg | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 docs/assets/badges/RA.svg diff --git a/docs/assets/badges/RA.svg b/docs/assets/badges/RA.svg new file mode 100644 index 00000000..84d8e397 --- /dev/null +++ b/docs/assets/badges/RA.svg @@ -0,0 +1,4 @@ + + + Recently added + \ No newline at end of file From 9e9c254ffbaef5d61f12872ed784e128222c8fd2 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 4 Dec 2025 10:07:54 +0000 Subject: [PATCH 23/98] Add 'Recently added' badge to badges.yml Added 'Recently added' badge with corresponding file and alt text. 
--- docs/_data/badges.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/_data/badges.yml b/docs/_data/badges.yml index 0fc12fef..35d743e6 100644 --- a/docs/_data/badges.yml +++ b/docs/_data/badges.yml @@ -7,3 +7,6 @@ trending: updated: file: updated.svg alt: "Recently updated" +Recently added: + file: RA.svg + alt: "Recently Added" From c2f3f42e18cda622ae81e9f94e80e748da45118c Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 4 Dec 2025 10:08:37 +0000 Subject: [PATCH 24/98] Add badge for recently added to navigation --- docs/_data/navigation.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/_data/navigation.yml b/docs/_data/navigation.yml index 130e34df..6151fef4 100644 --- a/docs/_data/navigation.yml +++ b/docs/_data/navigation.yml @@ -25,6 +25,8 @@ projects: - Arm Ambassador Support status: - Published + badges: + - Recently added - title: R-Arm-Community-Support description: "This self-service project boosts the R ecosystem on Windows on Arm\ \ by identifying unsupported packages, upstreaming fixes, and automating builds\u2014\ From d35bb684b3da4b0cca067648fd33723632045388 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 4 Dec 2025 10:10:43 +0000 Subject: [PATCH 25/98] Rename 'Recently added' to 'recently_added' --- docs/_data/badges.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/_data/badges.yml b/docs/_data/badges.yml index 35d743e6..d8798ef7 100644 --- a/docs/_data/badges.yml +++ b/docs/_data/badges.yml @@ -7,6 +7,7 @@ trending: updated: file: updated.svg alt: "Recently updated" -Recently added: +recently_added: file: RA.svg alt: "Recently Added" + From 656d7d6fa3860ab483a7956a8f9e844d2e91fb6a Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 4 Dec 2025 10:11:57 +0000 Subject: [PATCH 26/98] Rename badge from 'Recently added' to 'recently_added' --- docs/_data/navigation.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/_data/navigation.yml 
b/docs/_data/navigation.yml index 6151fef4..79aa83f9 100644 --- a/docs/_data/navigation.yml +++ b/docs/_data/navigation.yml @@ -26,7 +26,7 @@ projects: status: - Published badges: - - Recently added + - recently_added - title: R-Arm-Community-Support description: "This self-service project boosts the R ecosystem on Windows on Arm\ \ by identifying unsupported packages, upstreaming fixes, and automating builds\u2014\ From e2c3f9814d425094fd7a068f2181301ec1ad4156 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 4 Dec 2025 10:20:29 +0000 Subject: [PATCH 27/98] Add 'recently_added' badge to navigation entry --- docs/_data/navigation.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/_data/navigation.yml b/docs/_data/navigation.yml index 79aa83f9..13134735 100644 --- a/docs/_data/navigation.yml +++ b/docs/_data/navigation.yml @@ -67,6 +67,8 @@ projects: - Arm Ambassador Support status: - Published + badges: + - recently_added - title: Haskell-Compiler-Windows-on-Arm description: "This self-service project brings native Glasgow Haskell Compiler\ \ support to Windows on Arm\u2014unlocking efficient Arm-laptop builds, extending\ From 04bdfd77f343e1795d063573e612df3019452d07 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 4 Dec 2025 10:23:32 +0000 Subject: [PATCH 28/98] Add badge for recently added project --- docs/_data/navigation.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/_data/navigation.yml b/docs/_data/navigation.yml index 13134735..22032ad4 100644 --- a/docs/_data/navigation.yml +++ b/docs/_data/navigation.yml @@ -89,6 +89,8 @@ projects: - Arm Ambassador Support status: - Published + badges: + - recently_added - title: C-Based-Application-from-Scratch description: This self-service project goes back to the fundamentals. 
The challenge is to develop an application of your choice but your are only permitted to use From db469a2411ac5ea89bebd3118e9a2c75d562b49f Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 4 Dec 2025 10:42:12 +0000 Subject: [PATCH 29/98] Update update_docs.py --- scripts/update_docs.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/scripts/update_docs.py b/scripts/update_docs.py index 69fcaeaa..0229b3da 100644 --- a/scripts/update_docs.py +++ b/scripts/update_docs.py @@ -116,7 +116,8 @@ def normalize_date(meta_value, fallback_timestamp: float) -> str: def convert_md_images_to_html(md_text: str, doc_path: Path) -> str: """ - Finds Markdown images ![alt](path) - - Copies each image into docs/images/ + - Only rewrites *relative* image paths (no http(s)://, no leading /) + - Copies each such image into docs/images/ - Rewrites to - Skips the README banner ./images/DeveloperLabs_Header.png entirely. """ @@ -125,11 +126,16 @@ def convert_md_images_to_html(md_text: str, doc_path: Path) -> str: def replace(match): img_path = match.group(1).strip() - # Skip a specific header if converting README.md + # 1) If this is an absolute URL or already site-rooted, leave it alone + if img_path.startswith("http://") or img_path.startswith("https://") or img_path.startswith("/"): + return match.group(0) # return the original markdown image unchanged + + # 2) Skip a specific header if converting README.md readme_path = REPO_ROOT / "README.md" if doc_path.resolve() == readme_path.resolve() and img_path == "./images/DeveloperLabs_Header.png": return "" + # 3) Treat as a relative filesystem path source_path = (doc_path.parent / img_path).resolve() DOCS_IMAGES_DIR.mkdir(parents=True, exist_ok=True) @@ -281,3 +287,4 @@ def main(): if __name__ == "__main__": main() + From 1a80e36414b58d62d3089cb840f74f7da76a3cc5 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 4 Dec 2025 10:51:14 +0000 Subject: [PATCH 30/98] Update _config.yml --- 
docs/_config.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/_config.yml b/docs/_config.yml index 936eeddb..a26f5f11 100644 --- a/docs/_config.yml +++ b/docs/_config.yml @@ -18,8 +18,8 @@ ############################## text_skin: default # "default" (default), "dark", "forest", "ocean", "chocolate", "orange" highlight_theme: default # "default" (default), "tomorrow", "tomorrow-night", "tomorrow-night-eighties", "tomorrow-night-blue", "tomorrow-night-bright" -url: https://v3x-0e.github.io -baseurl: /DLFIXES +url: +baseurl: /Arm-Developer-Labs title : description: > # this means to ignore newlines until "Language & timezone" Developer Labs @@ -224,3 +224,4 @@ plugins: - jekyll-sitemap - jemoji + From c4db6ce3d08368f155e9c0d40a74a391bbaaa95c Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 4 Dec 2025 10:52:59 +0000 Subject: [PATCH 31/98] Refactor update_docs.py for improved functionality --- scripts/update_docs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/update_docs.py b/scripts/update_docs.py index 0229b3da..08751177 100644 --- a/scripts/update_docs.py +++ b/scripts/update_docs.py @@ -40,7 +40,7 @@ # ---------------------------- # Config helpers # ---------------------------- -def load_baseurl(default="/DLFIXES") -> str: +def load_baseurl(default="/Arm-Developer-labs") -> str: """ Reads baseurl from docs/_config.yml. - Falls back to provided default if file or key missing. 
@@ -288,3 +288,4 @@ def main(): if __name__ == "__main__": main() + From 51d4dab991841b5926e0fdf4d53ed730646017f2 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 4 Dec 2025 11:32:35 +0000 Subject: [PATCH 32/98] Refactor article layout with improved structure --- docs/_layouts/article.html | 61 +++++++++++++++++++++----------------- 1 file changed, 34 insertions(+), 27 deletions(-) diff --git a/docs/_layouts/article.html b/docs/_layouts/article.html index 1687fc41..5979e57b 100644 --- a/docs/_layouts/article.html +++ b/docs/_layouts/article.html @@ -118,7 +118,7 @@ - +
{%- assign nav_root = site.data.navigation["projects"] -%} {%- for category in nav_root -%} @@ -128,10 +128,20 @@

{{ category.title }}

{%- include snippets/get-nav-url.html path=_child.url -%} {%- assign _nav_url = __return -%} - {%- comment -%} normalize URL for matching against .url {%- endcomment -%} - {%- assign _nav_url_rel = _nav_url | replace: site.url, '' | replace: site.baseurl, '' -%} + {%- comment -%} + Normalize URL so it matches how site.posts/pages store .url + (no site.url, no baseurl) + {%- endcomment -%} + {%- assign _nav_url_rel = _nav_url + | replace: site.url, '' + | replace: site.baseurl, '' -%} - {%- comment -%} resolve badges {%- endcomment -%} + {%- comment -%} + Resolve badges: + 1) Prefer badges from navigation data (_child.badges) + 2) If not present, look up the matching post/page by URL + and read its front-matter badges + {%- endcomment -%} {%- assign _badges = _child.badges -%} {%- if _badges == nil or _badges == empty -%} {%- assign _doc = site.posts | where: "url", _nav_url_rel | first -%} @@ -139,33 +149,29 @@

{{ category.title }}

{%- if _doc and _doc.badges -%}{%- assign _badges = _doc.badges -%}{%- endif -%} {%- endif -%} - {%- comment -%} -We already resolved _badges above (from _child.badges or from matching posts/pages) -{%- endcomment -%} -
  • - -
    - {{ _child.title }} - {%- if _badges -%} - {% include badges.html items=_badges %} - {%- endif -%} -
    - -

    - {{ _child.description }} -

    -
  • - +
  • + +
    + {{ _child.title }} + {%- if _badges -%} + {% include badges.html items=_badges %} + {%- endif -%} +
    + +

    + {{ _child.description }} +

    +
  • {%- endfor -%} {%- endfor -%}
    - {%- include scripts/variables.html -%} +
    {{ content }} @@ -36,4 +39,3 @@ - From 94111cde1944c4afc3a4f6da24b2d59e2c3e0a9f Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 18 Dec 2025 14:19:15 +0000 Subject: [PATCH 45/98] Update base.html --- docs/_layouts/base.html | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/docs/_layouts/base.html b/docs/_layouts/base.html index 0019d28c..8894dc78 100644 --- a/docs/_layouts/base.html +++ b/docs/_layouts/base.html @@ -1,24 +1,26 @@ --- layout: none --- + {%- include snippets/get-lang.html -%} {%- include analytics.html -%} {%- include head.html -%} + - - - - - + + + + + - - - - - + + + + + {%- include scripts/variables.html -%} +
    {{ content }} @@ -36,4 +39,3 @@ - From 55a5e639280ee5d8d132eeaafcffa6d23a9de79b Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 18 Dec 2025 14:32:10 +0000 Subject: [PATCH 46/98] Update base.html --- docs/_layouts/base.html | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/_layouts/base.html b/docs/_layouts/base.html index 8894dc78..cd443940 100644 --- a/docs/_layouts/base.html +++ b/docs/_layouts/base.html @@ -14,13 +14,13 @@ - + - + + From 3b2c3ef62b7a863c476e3260efebf72db18f94eb Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 18 Dec 2025 14:40:40 +0000 Subject: [PATCH 47/98] Update base.html --- docs/_layouts/base.html | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/docs/_layouts/base.html b/docs/_layouts/base.html index cd443940..943816d4 100644 --- a/docs/_layouts/base.html +++ b/docs/_layouts/base.html @@ -13,30 +13,20 @@ - - + + - + - - {%- include scripts/variables.html -%}
    {{ content }}
    - - From eb4dd193950b73f9853b34be5de0f51b1d3b3cd0 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 18 Dec 2025 14:47:46 +0000 Subject: [PATCH 48/98] Update base.html --- docs/_layouts/base.html | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/_layouts/base.html b/docs/_layouts/base.html index 943816d4..7ac0d326 100644 --- a/docs/_layouts/base.html +++ b/docs/_layouts/base.html @@ -13,7 +13,7 @@ - + @@ -30,3 +30,4 @@
    + From 95a55d07c68b9e7a83202ac5abc73f56a93dd28f Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 18 Dec 2025 15:15:45 +0000 Subject: [PATCH 49/98] Update base.html --- docs/_layouts/base.html | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/_layouts/base.html b/docs/_layouts/base.html index 7ac0d326..a3f058b0 100644 --- a/docs/_layouts/base.html +++ b/docs/_layouts/base.html @@ -13,7 +13,7 @@ - + @@ -31,3 +31,4 @@ + From 32e2bab43f8c7d12f51dba6168bb8784cf9e5592 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 18 Dec 2025 15:16:46 +0000 Subject: [PATCH 50/98] Update base.html --- docs/_layouts/base.html | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/_layouts/base.html b/docs/_layouts/base.html index a3f058b0..30d3b2ae 100644 --- a/docs/_layouts/base.html +++ b/docs/_layouts/base.html @@ -13,7 +13,7 @@ - @@ -32,3 +32,4 @@ + From 6b731039be8b590e770a563f00598a2ea0d407b1 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Thu, 18 Dec 2025 15:41:55 +0000 Subject: [PATCH 51/98] Update navigation.yml --- docs/_data/navigation.yml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/docs/_data/navigation.yml b/docs/_data/navigation.yml index 22032ad4..075c7b67 100644 --- a/docs/_data/navigation.yml +++ b/docs/_data/navigation.yml @@ -25,8 +25,6 @@ projects: - Arm Ambassador Support status: - Published - badges: - - recently_added - title: R-Arm-Community-Support description: "This self-service project boosts the R ecosystem on Windows on Arm\ \ by identifying unsupported packages, upstreaming fixes, and automating builds\u2014\ @@ -46,8 +44,6 @@ projects: - Arm Ambassador Support status: - Published - badges: - - trending - title: AI-Agents description: "This self-service project builds a sandboxed AI agent on Arm hardware\ \ that harnesses appropriately sized LLMs to safely automate complex workflows\u2014\ @@ -67,8 +63,6 @@ projects: - Arm Ambassador Support status: - Published - 
badges: - - recently_added - title: Haskell-Compiler-Windows-on-Arm description: "This self-service project brings native Glasgow Haskell Compiler\ \ support to Windows on Arm\u2014unlocking efficient Arm-laptop builds, extending\ @@ -89,8 +83,6 @@ projects: - Arm Ambassador Support status: - Published - badges: - - recently_added - title: C-Based-Application-from-Scratch description: This self-service project goes back to the fundamentals. The challenge is to develop an application of your choice but your are only permitted to use From cbcbfd4c2b49f5b2e2931ccc62137dea9d009e09 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:07:43 +0000 Subject: [PATCH 52/98] Restore Jekyll config file with default settings --- docs/_config.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/_config.yml b/docs/_config.yml index a26f5f11..c8ce25b8 100644 --- a/docs/_config.yml +++ b/docs/_config.yml @@ -19,7 +19,7 @@ text_skin: default # "default" (default), "dark", "forest", "ocean", "chocolate", "orange" highlight_theme: default # "default" (default), "tomorrow", "tomorrow-night", "tomorrow-night-eighties", "tomorrow-night-blue", "tomorrow-night-bright" url: -baseurl: /Arm-Developer-Labs +baseurl: title : description: > # this means to ignore newlines until "Language & timezone" Developer Labs @@ -225,3 +225,4 @@ plugins: - jemoji + From 7dc8a7443f09bf29474a56cf1b844069e9b8cc13 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:11:10 +0000 Subject: [PATCH 53/98] Revise project details for AI-Powered Workflow Agent Updated the project details for the AI-Powered Workflow Agent, including title, description, subjects, platform, and support level. 
--- Projects/Projects/AI-Agents.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Projects/Projects/AI-Agents.md b/Projects/Projects/AI-Agents.md index d026e463..02fe8476 100644 --- a/Projects/Projects/AI-Agents.md +++ b/Projects/Projects/AI-Agents.md @@ -55,7 +55,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. @@ -67,3 +67,4 @@ To receive the benefits, you must show us your project through our [online form] 3. [AI agent to track real-time student engagement and exam performance (Jasper Wang, Sritej Tummuru, Talha Javed)](https://github.com/JasperWANG-911/AI_Agent) + From c6ea5512edeef8a3b96065a55ef6e5d55fa27122 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:11:33 +0000 Subject: [PATCH 54/98] Revise project description and benefits section Updated project details and benefits for the AI-Powered Package Porting Tool. 
--- Projects/Projects/AI-Powered-Porting-Tool.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/AI-Powered-Porting-Tool.md b/Projects/Projects/AI-Powered-Porting-Tool.md index a4d84c98..e474f62d 100644 --- a/Projects/Projects/AI-Powered-Porting-Tool.md +++ b/Projects/Projects/AI-Powered-Porting-Tool.md @@ -69,6 +69,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. From b19f1df0be8c18773768bd1e9d1ceaa223e105aa Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:12:22 +0000 Subject: [PATCH 55/98] Revise AMBA Simulator Framework documentation Updated the AMBA Simulator Framework documentation with enhanced details about the project, prerequisites, resources, and benefits for contributors. 
--- Projects/Projects/AMBA-Simulator-Framework.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/AMBA-Simulator-Framework.md b/Projects/Projects/AMBA-Simulator-Framework.md index b0781fb9..3ec62ed7 100644 --- a/Projects/Projects/AMBA-Simulator-Framework.md +++ b/Projects/Projects/AMBA-Simulator-Framework.md @@ -54,6 +54,7 @@ Similar projects: ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. From a00e76a46e24a5872b6fb8d2a313eaaeb5d4b851 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:13:17 +0000 Subject: [PATCH 56/98] Revise Academic Trends Dashboard project details Updated project details and enhanced description for clarity. 
--- Projects/Projects/Academic-Trends-Dashboard.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/Academic-Trends-Dashboard.md b/Projects/Projects/Academic-Trends-Dashboard.md index 620e36ed..c42bfcc5 100644 --- a/Projects/Projects/Academic-Trends-Dashboard.md +++ b/Projects/Projects/Academic-Trends-Dashboard.md @@ -55,6 +55,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
From ef2ec955ff080f557ebb342f0a9a2310db6f1ce4 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:13:53 +0000 Subject: [PATCH 57/98] Update project details for Edge AI with NPU --- Projects/Projects/Always-On-AI-with-Ethos-U85-NPU.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Projects/Projects/Always-On-AI-with-Ethos-U85-NPU.md b/Projects/Projects/Always-On-AI-with-Ethos-U85-NPU.md index 014da0cd..bbe62a2f 100644 --- a/Projects/Projects/Always-On-AI-with-Ethos-U85-NPU.md +++ b/Projects/Projects/Always-On-AI-with-Ethos-U85-NPU.md @@ -74,7 +74,8 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + From a151b5a2cd98ed075a3c9d17b0f2bcb13e47d04a Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:14:25 +0000 Subject: [PATCH 58/98] Revise Architecture Insight Dashboard project details Updated project details and added benefits for contributions. 
--- Projects/Projects/Architecture-Insight-Dashboard.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/Architecture-Insight-Dashboard.md b/Projects/Projects/Architecture-Insight-Dashboard.md index b502173c..af255e97 100644 --- a/Projects/Projects/Architecture-Insight-Dashboard.md +++ b/Projects/Projects/Architecture-Insight-Dashboard.md @@ -63,6 +63,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. From 18831b0cce96d161a5792908212dc38359b133f8 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:15:11 +0000 Subject: [PATCH 59/98] Revise project details for Arduino IDE on Windows on Arm Updated project metadata and description for clarity and detail. 
--- Projects/Projects/Arduino-IDE-Windows-on-Arm.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Projects/Projects/Arduino-IDE-Windows-on-Arm.md b/Projects/Projects/Arduino-IDE-Windows-on-Arm.md index 3b36429d..55d6d790 100644 --- a/Projects/Projects/Arduino-IDE-Windows-on-Arm.md +++ b/Projects/Projects/Arduino-IDE-Windows-on-Arm.md @@ -68,8 +68,9 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+ From 18db9a4269704d3ca33dccb8ff7ce7cf5dcb78fc Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:15:33 +0000 Subject: [PATCH 60/98] Update Bioinformatic-Pipeline-Analysis.md --- Projects/Projects/Bioinformatic-Pipeline-Analysis.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/Bioinformatic-Pipeline-Analysis.md b/Projects/Projects/Bioinformatic-Pipeline-Analysis.md index 065f7642..86235285 100644 --- a/Projects/Projects/Bioinformatic-Pipeline-Analysis.md +++ b/Projects/Projects/Bioinformatic-Pipeline-Analysis.md @@ -68,6 +68,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
From 4b3ea5a2dbebcf31950979462d4f23f96f3ea898 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:16:14 +0000 Subject: [PATCH 61/98] Revise project description and support benefits Updated project details and benefits for clarity. --- Projects/Projects/C-Based-Application-from-Scratch.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Projects/Projects/C-Based-Application-from-Scratch.md b/Projects/Projects/C-Based-Application-from-Scratch.md index 06816d0f..e9fecd70 100644 --- a/Projects/Projects/C-Based-Application-from-Scratch.md +++ b/Projects/Projects/C-Based-Application-from-Scratch.md @@ -53,7 +53,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. @@ -61,3 +61,4 @@ To receive the benefits, you must show us your project through our [online form] 1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git). 2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game). 
+ From 05492266f5ac34e977b919405994646735cf80b3 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:16:53 +0000 Subject: [PATCH 62/98] Add badges section to Edge-AI-On-Mobile.md Added badges section to the Edge AI on Mobile documentation. --- Projects/Projects/Edge-AI-On-Mobile.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Projects/Projects/Edge-AI-On-Mobile.md b/Projects/Projects/Edge-AI-On-Mobile.md index 4b7f9113..8974b4e0 100644 --- a/Projects/Projects/Edge-AI-On-Mobile.md +++ b/Projects/Projects/Edge-AI-On-Mobile.md @@ -20,6 +20,7 @@ publication-date: 2025-11-27 license: status: - "Published" +badges: trending --- ## Description @@ -73,4 +74,4 @@ Standout project contributions will result in digital badges for CV building, re To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- \ No newline at end of file +--- From 774579700ca053dc7c4c46b2feea935d1aae43df Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:17:13 +0000 Subject: [PATCH 63/98] Revise benefits description for project contributions Reworded benefits section for clarity and conciseness. --- Projects/Projects/Edge-AI-On-Mobile.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Projects/Projects/Edge-AI-On-Mobile.md b/Projects/Projects/Edge-AI-On-Mobile.md index 8974b4e0..acbc309a 100644 --- a/Projects/Projects/Edge-AI-On-Mobile.md +++ b/Projects/Projects/Edge-AI-On-Mobile.md @@ -69,8 +69,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. 
We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. - +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. From 9235243867862ddccfaf4054d037b34ff36d7abf Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:17:40 +0000 Subject: [PATCH 64/98] Clarify benefits of project contributions Reworded the benefits section for clarity and emphasis on community contributions. --- Projects/Projects/Ethos-U85-NPU-Applications.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/Ethos-U85-NPU-Applications.md b/Projects/Projects/Ethos-U85-NPU-Applications.md index f9f7015d..99c8a0fe 100644 --- a/Projects/Projects/Ethos-U85-NPU-Applications.md +++ b/Projects/Projects/Ethos-U85-NPU-Applications.md @@ -111,7 +111,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. From 974838a6dbdeb62908ef4e8bff0dc50a6c191a08 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:18:19 +0000 Subject: [PATCH 65/98] Add badges section to Ethos-U85-NPU-Applications.md --- Projects/Projects/Ethos-U85-NPU-Applications.md | 1 + 1 file changed, 1 insertion(+) diff --git a/Projects/Projects/Ethos-U85-NPU-Applications.md b/Projects/Projects/Ethos-U85-NPU-Applications.md index 99c8a0fe..b5107e2f 100644 --- a/Projects/Projects/Ethos-U85-NPU-Applications.md +++ b/Projects/Projects/Ethos-U85-NPU-Applications.md @@ -20,6 +20,7 @@ publication-date: 2025-11-27 license: status: - "Published" +badges: trending donation: --- From b380167ef06df1a872927a74a4f1f59b81d6f193 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:19:09 +0000 Subject: [PATCH 66/98] Revise project details and benefits for FPGA platform Updated project details for the Linux Capable SoC FPGA Prototyping Platform, including benefits and support information. 
--- Projects/Projects/FPGA-Accellerator-with-DDR.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/FPGA-Accellerator-with-DDR.md b/Projects/Projects/FPGA-Accellerator-with-DDR.md index f5050f41..220c0317 100644 --- a/Projects/Projects/FPGA-Accellerator-with-DDR.md +++ b/Projects/Projects/FPGA-Accellerator-with-DDR.md @@ -42,6 +42,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
From 932a8991ebe3e1cf8d7ac89842450b35adc2b3d6 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:19:53 +0000 Subject: [PATCH 67/98] Update Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md --- .../Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md b/Projects/Projects/Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md index 53eca7a5..90fe0d68 100644 --- a/Projects/Projects/Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md +++ b/Projects/Projects/Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md @@ -21,6 +21,7 @@ publication-date: 2025-11-27 license: status: - "Published" +badges: trending donation: --- @@ -84,7 +85,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
From 81dd2c272f3b384c4dcec30569449b6df044e659 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:20:32 +0000 Subject: [PATCH 68/98] Revise HPC Algorithm project details and add badges Updated project metadata and added badges for recognition. Enhanced project description and prerequisites for clarity. --- Projects/Projects/HPC-Algorithm.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/HPC-Algorithm.md b/Projects/Projects/HPC-Algorithm.md index 8c772899..fa232389 100644 --- a/Projects/Projects/HPC-Algorithm.md +++ b/Projects/Projects/HPC-Algorithm.md @@ -18,6 +18,7 @@ publication-date: 2025-05-30 license: status: - "Published" +badges: trending donation: --- @@ -53,6 +54,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
From 077dc8d75e68346a3969c672252fccda1e5cdeb2 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:21:34 +0000 Subject: [PATCH 69/98] Revise Haskell Compiler project for Windows on Arm Updated the project details for adding Windows on Arm support to the Glasgow Haskell Compiler, including prerequisites and benefits. --- Projects/Projects/Haskell-Compiler-Windows-on-Arm.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Projects/Projects/Haskell-Compiler-Windows-on-Arm.md b/Projects/Projects/Haskell-Compiler-Windows-on-Arm.md index 5c61c98c..f06a8ae1 100644 --- a/Projects/Projects/Haskell-Compiler-Windows-on-Arm.md +++ b/Projects/Projects/Haskell-Compiler-Windows-on-Arm.md @@ -68,6 +68,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + From 7077ddee604e6238e653061758fb1a8e3a67b3ee Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:22:03 +0000 Subject: [PATCH 70/98] Revise LLM Benchmark for Arm Server documentation Updated project details and formatting in the LLM Benchmark documentation, including prerequisites, resources, and benefits. 
--- Projects/Projects/LLM-Benchmark-on-Arm-Server.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/LLM-Benchmark-on-Arm-Server.md b/Projects/Projects/LLM-Benchmark-on-Arm-Server.md index 7855110c..ad9c8868 100644 --- a/Projects/Projects/LLM-Benchmark-on-Arm-Server.md +++ b/Projects/Projects/LLM-Benchmark-on-Arm-Server.md @@ -45,6 +45,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. From cbeb7e40bb731d686229777e6f6e24060f7581ee Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:22:27 +0000 Subject: [PATCH 71/98] Revise project description and benefits for OpenSora Updated project details and benefits for OpenSora on AWS Graviton. 
--- Projects/Projects/Machine-Learning-on-AWS-Graviton.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/Machine-Learning-on-AWS-Graviton.md b/Projects/Projects/Machine-Learning-on-AWS-Graviton.md index 21389beb..97b1627c 100644 --- a/Projects/Projects/Machine-Learning-on-AWS-Graviton.md +++ b/Projects/Projects/Machine-Learning-on-AWS-Graviton.md @@ -60,7 +60,7 @@ The aim of this project is to port, benchmark, and optimize a pre-trained ViT mo This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). ## Benefits +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
From 28906277c30354c540d721067a24520d7096557f Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:23:05 +0000 Subject: [PATCH 72/98] Update NPC-LLM-Runtime.md --- Projects/Projects/NPC-LLM-Runtime.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/NPC-LLM-Runtime.md b/Projects/Projects/NPC-LLM-Runtime.md index 707dc588..1f75a06d 100644 --- a/Projects/Projects/NPC-LLM-Runtime.md +++ b/Projects/Projects/NPC-LLM-Runtime.md @@ -19,6 +19,7 @@ publication-date: 2025-08-28 license: status: - "Published" +badges: trending donation: --- @@ -65,6 +66,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
From 6523957d9a344da23da383e6df2dd56cac16b943 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:23:31 +0000 Subject: [PATCH 73/98] Revise project details and enhance benefits section Updated project metadata and added project benefits section. --- Projects/Projects/Processor-in-the-Loop-Automotive.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Projects/Projects/Processor-in-the-Loop-Automotive.md b/Projects/Projects/Processor-in-the-Loop-Automotive.md index e42fecc7..a6bee43b 100644 --- a/Projects/Projects/Processor-in-the-Loop-Automotive.md +++ b/Projects/Projects/Processor-in-the-Loop-Automotive.md @@ -67,6 +67,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + From 5d3f7fe07e691dda3246f6d71a2269d3d65ff44b Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:23:49 +0000 Subject: [PATCH 74/98] Revise benefits description for project contributions Updated the benefits section to clarify the recognition process. 
--- Projects/Projects/Python-Porting-Challenge.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/Python-Porting-Challenge.md b/Projects/Projects/Python-Porting-Challenge.md index 49572f8d..47a3ab1c 100644 --- a/Projects/Projects/Python-Porting-Challenge.md +++ b/Projects/Projects/Python-Porting-Challenge.md @@ -63,6 +63,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. From 5aaba48e4ab551de72b7ef377cd232b08d57245a Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:25:21 +0000 Subject: [PATCH 75/98] Revise Quantization-Aware Training project documentation Updated project details and prerequisites for Quantization-Aware Training on Arm devices, including resources and support information. 
--- Projects/Projects/Quantisation-Aware-Training.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Projects/Projects/Quantisation-Aware-Training.md b/Projects/Projects/Quantisation-Aware-Training.md index 8f3eb952..7111a631 100644 --- a/Projects/Projects/Quantisation-Aware-Training.md +++ b/Projects/Projects/Quantisation-Aware-Training.md @@ -59,6 +59,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+ From 9e496915c148c1837f008fbdc5bd8205af67f3f0 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:25:56 +0000 Subject: [PATCH 76/98] Update R-Arm-Community-Support.md --- Projects/Projects/R-Arm-Community-Support.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/R-Arm-Community-Support.md b/Projects/Projects/R-Arm-Community-Support.md index baf9dfe8..d38f8368 100644 --- a/Projects/Projects/R-Arm-Community-Support.md +++ b/Projects/Projects/R-Arm-Community-Support.md @@ -74,6 +74,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
From cdaf9646fe0771e148479c8bd2163b359456e668 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:26:13 +0000 Subject: [PATCH 77/98] Update Real-Time-Image-Classification.md --- Projects/Projects/Real-Time-Image-Classification.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/Real-Time-Image-Classification.md b/Projects/Projects/Real-Time-Image-Classification.md index 83ccc6c0..01c5c857 100644 --- a/Projects/Projects/Real-Time-Image-Classification.md +++ b/Projects/Projects/Real-Time-Image-Classification.md @@ -55,6 +55,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
From 01935a0d61f6a92d7cb5598e665d7d6ea7386049 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:26:53 +0000 Subject: [PATCH 78/98] Clarify benefits of standout project contributions Updated wording for clarity on project contributions and benefits. --- Projects/Projects/Responsible-AI-and-Yellow-Teaming.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/Responsible-AI-and-Yellow-Teaming.md b/Projects/Projects/Responsible-AI-and-Yellow-Teaming.md index 79cb96c9..ece793bd 100644 --- a/Projects/Projects/Responsible-AI-and-Yellow-Teaming.md +++ b/Projects/Projects/Responsible-AI-and-Yellow-Teaming.md @@ -77,6 +77,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
From dd71ca4fa6ec3e83b424bbca155d0ee9b17b2372 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:27:15 +0000 Subject: [PATCH 79/98] Update Sentiment-Analysis-Dashboard.md --- Projects/Projects/Sentiment-Analysis-Dashboard.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/Sentiment-Analysis-Dashboard.md b/Projects/Projects/Sentiment-Analysis-Dashboard.md index 00a10050..4892f99e 100644 --- a/Projects/Projects/Sentiment-Analysis-Dashboard.md +++ b/Projects/Projects/Sentiment-Analysis-Dashboard.md @@ -51,6 +51,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
From 79d5ba63135ac177e0dd0dccc2a5d2d755248098 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:27:41 +0000 Subject: [PATCH 80/98] Update Smart-Voice-Assistant.md --- Projects/Projects/Smart-Voice-Assistant.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/Smart-Voice-Assistant.md b/Projects/Projects/Smart-Voice-Assistant.md index d2ca6e57..b2bb3fc9 100644 --- a/Projects/Projects/Smart-Voice-Assistant.md +++ b/Projects/Projects/Smart-Voice-Assistant.md @@ -51,6 +51,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
From c28ca858d10f20038a5961c95107a7e9ca3cad9c Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:28:00 +0000 Subject: [PATCH 81/98] Revise SpecINT2017 project details and benefits Updated project metadata and description for SpecINT2017 benchmarking on Arm64, including prerequisites and benefits. --- Projects/Projects/SpecINT2017-benchmarking-on-Arm64.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/SpecINT2017-benchmarking-on-Arm64.md b/Projects/Projects/SpecINT2017-benchmarking-on-Arm64.md index 64f88267..ed9d9d70 100644 --- a/Projects/Projects/SpecINT2017-benchmarking-on-Arm64.md +++ b/Projects/Projects/SpecINT2017-benchmarking-on-Arm64.md @@ -70,6 +70,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
From 35eb919b2ed8a02445ecc24a72dddcef1de984f2 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:28:25 +0000 Subject: [PATCH 82/98] Revise Learning Path project details and benefits Updated project details and benefits for the Learning Path tutorial. --- Projects/Projects/Write-A-Learning-Path.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Projects/Projects/Write-A-Learning-Path.md b/Projects/Projects/Write-A-Learning-Path.md index b531d347..44e11ae6 100644 --- a/Projects/Projects/Write-A-Learning-Path.md +++ b/Projects/Projects/Write-A-Learning-Path.md @@ -48,6 +48,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
From 43d13bb516b554ab8c8e3dc68bd9c2c97e8ce3cc Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:28:50 +0000 Subject: [PATCH 83/98] Revise project details for Compliance-Ready Smart Camera Updated project details and deliverables for the Compliance-Ready Smart Camera System, including the importance of compliance with automotive standards and the benefits of project contributions. --- .../Compliance-Ready-Smart-Camera-System.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Projects/Extended-Team-Projects/Compliance-Ready-Smart-Camera-System.md b/Projects/Extended-Team-Projects/Compliance-Ready-Smart-Camera-System.md index a7738672..eaa31070 100644 --- a/Projects/Extended-Team-Projects/Compliance-Ready-Smart-Camera-System.md +++ b/Projects/Extended-Team-Projects/Compliance-Ready-Smart-Camera-System.md @@ -57,6 +57,6 @@ Deliverables include: ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. From 6dc89bbb33c233b70d0d3a0d543d173955b5db86 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:29:17 +0000 Subject: [PATCH 84/98] Revise Human-Centric Robotics project documentation Updated project details and benefits for the Human-Centric Robotics initiative. --- Projects/Extended-Team-Projects/Human-Centric-Robotics.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Projects/Extended-Team-Projects/Human-Centric-Robotics.md b/Projects/Extended-Team-Projects/Human-Centric-Robotics.md index 9cdab1dc..d637e7db 100644 --- a/Projects/Extended-Team-Projects/Human-Centric-Robotics.md +++ b/Projects/Extended-Team-Projects/Human-Centric-Robotics.md @@ -64,6 +64,7 @@ Team size: 2+ participants ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+ From d13bb130187dc8e1b03ec80b2366c85e9b1b8169 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:43:46 +0000 Subject: [PATCH 85/98] Add files via upload --- docs/_posts/2025-05-30-academic-trends-dashboard.md | 8 ++++---- docs/_posts/2025-05-30-ai-agents.md | 7 +++---- docs/_posts/2025-05-30-ai-powered-porting-tool.md | 4 ++-- docs/_posts/2025-05-30-amba-simulator-framework.md | 10 ++++++---- .../2025-05-30-architecture-insight-dashboard.md | 10 ++++++---- docs/_posts/2025-05-30-arduino-ide-windows-on-arm.md | 10 +++++----- .../2025-05-30-bioinformatic-pipeline-analysis.md | 6 ++++-- .../2025-05-30-compliance-ready-smart-camera-system.md | 4 ++-- docs/_posts/2025-05-30-fpga-accellerator-with-ddr.md | 6 ++++-- .../2025-05-30-haskell-compiler-windows-on-arm.md | 8 ++++---- docs/_posts/2025-05-30-hpc-algorithm.md | 7 +++++-- docs/_posts/2025-05-30-human-centric-robotics.md | 4 ++-- docs/_posts/2025-05-30-llm-benchmark-on-arm-server.md | 6 ++++-- .../2025-05-30-machine-learning-on-aws-graviton.md | 8 ++++---- .../2025-05-30-processor-in-the-loop-automotive.md | 8 ++++---- docs/_posts/2025-05-30-quantisation-aware-training.md | 4 ++-- docs/_posts/2025-05-30-r-arm-community-support.md | 10 ++++++---- .../2025-05-30-real-time-image-classification.md | 8 ++++---- .../2025-05-30-responsible-ai-and-yellow-teaming.md | 4 ++-- docs/_posts/2025-05-30-sentiment-analysis-dashboard.md | 4 ++-- docs/_posts/2025-05-30-smart-voice-assistant.md | 10 ++++++---- .../2025-05-30-specint2017-benchmarking-on-arm64.md | 6 ++++-- docs/_posts/2025-05-30-write-a-learning-path.md | 8 ++++---- .../2025-07-11-c-based-application-from-scratch.md | 4 ++-- docs/_posts/2025-08-28-npc-llm-runtime.md | 7 +++++-- docs/_posts/2025-11-03-python-porting-challenge.md | 8 ++++---- .../2025-11-27-always-on-ai-with-ethos-u85-npu.md | 8 ++++---- docs/_posts/2025-11-27-edge-ai-on-mobile.md | 7 +++---- docs/_posts/2025-11-27-ethos-u85-npu-applications.md | 9 +++++---- 
...7-game-dev-using-neural-graphics---unreal-engine.md | 9 +++++---- 30 files changed, 117 insertions(+), 95 deletions(-) diff --git a/docs/_posts/2025-05-30-academic-trends-dashboard.md b/docs/_posts/2025-05-30-academic-trends-dashboard.md index 4fdc0e81..604f1118 100644 --- a/docs/_posts/2025-05-30-academic-trends-dashboard.md +++ b/docs/_posts/2025-05-30-academic-trends-dashboard.md @@ -25,7 +25,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -57,11 +57,11 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -93,6 +93,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-ai-agents.md b/docs/_posts/2025-05-30-ai-agents.md index 7fb847c0..26be05ed 100644 --- a/docs/_posts/2025-05-30-ai-agents.md +++ b/docs/_posts/2025-05-30-ai-agents.md @@ -19,8 +19,7 @@ license: status: - Published donation: -badges: -- Trending +badges: trending layout: article sidebar: nav: projects @@ -57,7 +56,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. @@ -101,7 +100,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/docs/_posts/2025-05-30-ai-powered-porting-tool.md b/docs/_posts/2025-05-30-ai-powered-porting-tool.md index 0adf1555..4c6456f3 100644 --- a/docs/_posts/2025-05-30-ai-powered-porting-tool.md +++ b/docs/_posts/2025-05-30-ai-powered-porting-tool.md @@ -70,7 +70,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -118,6 +118,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-amba-simulator-framework.md b/docs/_posts/2025-05-30-amba-simulator-framework.md index f6d01335..88cd6c10 100644 --- a/docs/_posts/2025-05-30-amba-simulator-framework.md +++ b/docs/_posts/2025-05-30-amba-simulator-framework.md @@ -22,7 +22,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Audience Electronic Engineering @@ -55,11 +55,12 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Audience Electronic Engineering @@ -92,6 +93,7 @@ Similar projects: ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-architecture-insight-dashboard.md b/docs/_posts/2025-05-30-architecture-insight-dashboard.md index c9015714..c9d884ba 100644 --- a/docs/_posts/2025-05-30-architecture-insight-dashboard.md +++ b/docs/_posts/2025-05-30-architecture-insight-dashboard.md @@ -25,7 +25,7 @@ layout: article sidebar: nav: projects full_description: |- - + ### Description @@ -65,11 +65,12 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
--- - + ### Description @@ -109,6 +110,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-arduino-ide-windows-on-arm.md b/docs/_posts/2025-05-30-arduino-ide-windows-on-arm.md index 5e280a8e..c342deaf 100644 --- a/docs/_posts/2025-05-30-arduino-ide-windows-on-arm.md +++ b/docs/_posts/2025-05-30-arduino-ide-windows-on-arm.md @@ -21,12 +21,12 @@ status: - Published donation: badges: -- Trending +- trending layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -71,11 +71,11 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -120,6 +120,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-bioinformatic-pipeline-analysis.md b/docs/_posts/2025-05-30-bioinformatic-pipeline-analysis.md index 5a065749..e9da6e8b 100644 --- a/docs/_posts/2025-05-30-bioinformatic-pipeline-analysis.md +++ b/docs/_posts/2025-05-30-bioinformatic-pipeline-analysis.md @@ -69,7 +69,8 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -119,6 +120,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-compliance-ready-smart-camera-system.md b/docs/_posts/2025-05-30-compliance-ready-smart-camera-system.md index 85a3c4be..539ab464 100644 --- a/docs/_posts/2025-05-30-compliance-ready-smart-camera-system.md +++ b/docs/_posts/2025-05-30-compliance-ready-smart-camera-system.md @@ -59,7 +59,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. 
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -92,6 +92,6 @@ Deliverables include: ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-fpga-accellerator-with-ddr.md b/docs/_posts/2025-05-30-fpga-accellerator-with-ddr.md index 8139ce0e..41b544c1 100644 --- a/docs/_posts/2025-05-30-fpga-accellerator-with-ddr.md +++ b/docs/_posts/2025-05-30-fpga-accellerator-with-ddr.md @@ -44,7 +44,8 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. 
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -69,6 +70,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-haskell-compiler-windows-on-arm.md b/docs/_posts/2025-05-30-haskell-compiler-windows-on-arm.md index a498d87e..137983af 100644 --- a/docs/_posts/2025-05-30-haskell-compiler-windows-on-arm.md +++ b/docs/_posts/2025-05-30-haskell-compiler-windows-on-arm.md @@ -24,7 +24,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -70,11 +70,11 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -120,6 +120,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-hpc-algorithm.md b/docs/_posts/2025-05-30-hpc-algorithm.md index cc1e89c4..b528b8f0 100644 --- a/docs/_posts/2025-05-30-hpc-algorithm.md +++ b/docs/_posts/2025-05-30-hpc-algorithm.md @@ -18,6 +18,7 @@ publication-date: 2025-05-30 license: status: - Published +badges: trending donation: layout: article sidebar: @@ -53,7 +54,8 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -87,6 +89,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-human-centric-robotics.md b/docs/_posts/2025-05-30-human-centric-robotics.md index eaed709d..0c04efba 100644 --- a/docs/_posts/2025-05-30-human-centric-robotics.md +++ b/docs/_posts/2025-05-30-human-centric-robotics.md @@ -66,7 +66,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -108,6 +108,6 @@ Team size: 2+ participants ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-llm-benchmark-on-arm-server.md b/docs/_posts/2025-05-30-llm-benchmark-on-arm-server.md index e3490410..31f91a63 100644 --- a/docs/_posts/2025-05-30-llm-benchmark-on-arm-server.md +++ b/docs/_posts/2025-05-30-llm-benchmark-on-arm-server.md @@ -46,7 +46,8 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -72,6 +73,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-machine-learning-on-aws-graviton.md b/docs/_posts/2025-05-30-machine-learning-on-aws-graviton.md index 61375674..148ccb56 100644 --- a/docs/_posts/2025-05-30-machine-learning-on-aws-graviton.md +++ b/docs/_posts/2025-05-30-machine-learning-on-aws-graviton.md @@ -24,7 +24,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -62,12 +62,12 @@ full_description: |- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). ## Benefits + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
--- - + ## Description @@ -105,7 +105,7 @@ The aim of this project is to port, benchmark, and optimize a pre-trained ViT mo This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). ## Benefits +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-processor-in-the-loop-automotive.md b/docs/_posts/2025-05-30-processor-in-the-loop-automotive.md index 3a6dd0ad..02f20812 100644 --- a/docs/_posts/2025-05-30-processor-in-the-loop-automotive.md +++ b/docs/_posts/2025-05-30-processor-in-the-loop-automotive.md @@ -26,7 +26,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -69,11 +69,11 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. 
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -116,6 +116,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-quantisation-aware-training.md b/docs/_posts/2025-05-30-quantisation-aware-training.md index 98140faa..3a986fe2 100644 --- a/docs/_posts/2025-05-30-quantisation-aware-training.md +++ b/docs/_posts/2025-05-30-quantisation-aware-training.md @@ -61,7 +61,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -101,6 +101,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-r-arm-community-support.md b/docs/_posts/2025-05-30-r-arm-community-support.md index 4da6d60f..56919c91 100644 --- a/docs/_posts/2025-05-30-r-arm-community-support.md +++ b/docs/_posts/2025-05-30-r-arm-community-support.md @@ -23,7 +23,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -76,11 +76,12 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -133,6 +134,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-real-time-image-classification.md b/docs/_posts/2025-05-30-real-time-image-classification.md index 4b39214c..1ae250eb 100644 --- a/docs/_posts/2025-05-30-real-time-image-classification.md +++ b/docs/_posts/2025-05-30-real-time-image-classification.md @@ -25,7 +25,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -57,11 +57,11 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -93,6 +93,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-responsible-ai-and-yellow-teaming.md b/docs/_posts/2025-05-30-responsible-ai-and-yellow-teaming.md index 10b60ace..f06944de 100644 --- a/docs/_posts/2025-05-30-responsible-ai-and-yellow-teaming.md +++ b/docs/_posts/2025-05-30-responsible-ai-and-yellow-teaming.md @@ -79,7 +79,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -139,6 +139,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. 
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-sentiment-analysis-dashboard.md b/docs/_posts/2025-05-30-sentiment-analysis-dashboard.md index e2e1e2f8..5b744374 100644 --- a/docs/_posts/2025-05-30-sentiment-analysis-dashboard.md +++ b/docs/_posts/2025-05-30-sentiment-analysis-dashboard.md @@ -51,7 +51,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -80,6 +80,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-smart-voice-assistant.md b/docs/_posts/2025-05-30-smart-voice-assistant.md index bd996564..2a3afb0c 100644 --- a/docs/_posts/2025-05-30-smart-voice-assistant.md +++ b/docs/_posts/2025-05-30-smart-voice-assistant.md @@ -24,7 +24,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -53,11 +53,12 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
--- - + ## Description @@ -86,6 +87,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-specint2017-benchmarking-on-arm64.md b/docs/_posts/2025-05-30-specint2017-benchmarking-on-arm64.md index f54a5f93..9ef43f0b 100644 --- a/docs/_posts/2025-05-30-specint2017-benchmarking-on-arm64.md +++ b/docs/_posts/2025-05-30-specint2017-benchmarking-on-arm64.md @@ -70,7 +70,8 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -119,6 +120,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-write-a-learning-path.md b/docs/_posts/2025-05-30-write-a-learning-path.md index 3997aa47..0bde8c2c 100644 --- a/docs/_posts/2025-05-30-write-a-learning-path.md +++ b/docs/_posts/2025-05-30-write-a-learning-path.md @@ -29,7 +29,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -50,11 +50,11 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -75,6 +75,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-07-11-c-based-application-from-scratch.md b/docs/_posts/2025-07-11-c-based-application-from-scratch.md index 2b1069d7..675a8e33 100644 --- a/docs/_posts/2025-07-11-c-based-application-from-scratch.md +++ b/docs/_posts/2025-07-11-c-based-application-from-scratch.md @@ -54,7 +54,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. @@ -94,7 +94,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/docs/_posts/2025-08-28-npc-llm-runtime.md b/docs/_posts/2025-08-28-npc-llm-runtime.md index 0bd5b22e..b3090b32 100644 --- a/docs/_posts/2025-08-28-npc-llm-runtime.md +++ b/docs/_posts/2025-08-28-npc-llm-runtime.md @@ -19,6 +19,7 @@ publication-date: 2025-08-28 license: status: - Published +badges: trending donation: layout: article sidebar: @@ -67,7 +68,8 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. 
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -114,6 +116,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-03-python-porting-challenge.md b/docs/_posts/2025-11-03-python-porting-challenge.md index fba6fa11..4950ccd1 100644 --- a/docs/_posts/2025-11-03-python-porting-challenge.md +++ b/docs/_posts/2025-11-03-python-porting-challenge.md @@ -21,7 +21,7 @@ layout: article sidebar: nav: projects full_description: |- - + ### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. 
Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility* @@ -65,11 +65,11 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility* @@ -113,6 +113,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-always-on-ai-with-ethos-u85-npu.md b/docs/_posts/2025-11-27-always-on-ai-with-ethos-u85-npu.md index 36dd2351..db0251b6 100644 --- a/docs/_posts/2025-11-27-always-on-ai-with-ethos-u85-npu.md +++ b/docs/_posts/2025-11-27-always-on-ai-with-ethos-u85-npu.md @@ -27,7 +27,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -76,12 +76,12 @@ full_description: |- ## Benefits - Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -130,7 +130,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-edge-ai-on-mobile.md b/docs/_posts/2025-11-27-edge-ai-on-mobile.md index aedc8670..f90f03fc 100644 --- a/docs/_posts/2025-11-27-edge-ai-on-mobile.md +++ b/docs/_posts/2025-11-27-edge-ai-on-mobile.md @@ -20,6 +20,7 @@ publication-date: 2025-11-27 license: status: - Published +badges: trending layout: article sidebar: nav: projects @@ -70,8 +71,7 @@ full_description: |- ## Benefits - Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. - + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. @@ -123,8 +123,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. - +Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/docs/_posts/2025-11-27-ethos-u85-npu-applications.md b/docs/_posts/2025-11-27-ethos-u85-npu-applications.md index b6ac6c3b..2d3b1edb 100644 --- a/docs/_posts/2025-11-27-ethos-u85-npu-applications.md +++ b/docs/_posts/2025-11-27-ethos-u85-npu-applications.md @@ -20,12 +20,13 @@ publication-date: 2025-11-27 license: status: - Published +badges: trending donation: layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -113,12 +114,12 @@ full_description: |- ## Benefits - Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -206,7 +207,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. 
We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-game-dev-using-neural-graphics---unreal-engine.md b/docs/_posts/2025-11-27-game-dev-using-neural-graphics---unreal-engine.md index 8bde3aba..24988e84 100644 --- a/docs/_posts/2025-11-27-game-dev-using-neural-graphics---unreal-engine.md +++ b/docs/_posts/2025-11-27-game-dev-using-neural-graphics---unreal-engine.md @@ -21,12 +21,13 @@ publication-date: 2025-11-27 license: status: - Published +badges: trending donation: layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -86,12 +87,12 @@ full_description: |- ## Benefits - Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
--- - + ## Description @@ -151,7 +152,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file From 0ea7bc5236ee169c361984026ca3d5f8a2674525 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 10:55:11 +0000 Subject: [PATCH 86/98] Delete docs/_posts/2025-05-30-academic-trends-dashboard.md --- .../2025-05-30-academic-trends-dashboard.md | 98 ------------------- 1 file changed, 98 deletions(-) delete mode 100644 docs/_posts/2025-05-30-academic-trends-dashboard.md diff --git a/docs/_posts/2025-05-30-academic-trends-dashboard.md b/docs/_posts/2025-05-30-academic-trends-dashboard.md deleted file mode 100644 index 604f1118..00000000 --- a/docs/_posts/2025-05-30-academic-trends-dashboard.md +++ /dev/null @@ -1,98 +0,0 @@ ---- -title: Design a Dashboard that Tracks the Progression of Academic Papers on Computer Science Over Time -description: This self-service project creates a web-scraping, database-driven dashboard that visualizes how computer-science research topics shift over time—helping Arm partners and chip architects align future hardware designs with emerging algorithmic trends. 
-subjects: -- Web -- Databases -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Description - - **Why this is important?** - - The field of computer science research is continually evolving, with new algorithms shaping the design of future hardware. This project aims to guide computer architecture decisions, ensuring that upcoming hardware aligns with the needs of software algorithms and applications. The dashboard you create can be of use to Arm partners manufacturing physical chips and used to guide architecture decisions. - - **Project Summary** - - The main deliverable is a web scraping tool that pulls keywords from academic papers, considering the popularity of the paper and its publication. The data will be stored in an appropriate database and displayed in a web browser format, allowing users to visualize trends and changes in research focus over time. This project will provide practical experience in using APIs, web scraping, and data analysis. Some academic search engines to consider are Google Scholar, [BASE](https://www.base-search.net/), [Core](https://core.ac.uk/) and [Science.gov](https://science.gov/). - - - ## Prequisites - - - Software: Intermediate understand of a scripting programming language (e.g., Python, JavaScript), web development and statistics. - - Hardware: Access to a computer with internet connectivity - - API access to scrape specific journal websites, you may need to obtain explicit permission from the website administrators or owners. 
- - ## Resources from Arm and our partners - - - Learning path: [Deploy MariaDB on Arm servers](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/)) - - Learning path: [Learn how to deploy PostgresSQL](https://learn.arm.com/learning-paths/servers-and-cloud-computing/postgresql/) - - Software Libraries: Example libraries for web scraping are [BeautifulSoup](https://pypi.org/project/beautifulsoup4/), Selenium. - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - -## Description - -**Why this is important?** - -The field of computer science research is continually evolving, with new algorithms shaping the design of future hardware. This project aims to guide computer architecture decisions, ensuring that upcoming hardware aligns with the needs of software algorithms and applications. The dashboard you create can be of use to Arm partners manufacturing physical chips and used to guide architecture decisions. - -**Project Summary** - -The main deliverable is a web scraping tool that pulls keywords from academic papers, considering the popularity of the paper and its publication. 
The data will be stored in an appropriate database and displayed in a web browser format, allowing users to visualize trends and changes in research focus over time. This project will provide practical experience in using APIs, web scraping, and data analysis. Some academic search engines to consider are Google Scholar, [BASE](https://www.base-search.net/), [Core](https://core.ac.uk/) and [Science.gov](https://science.gov/). - - -## Prequisites - -- Software: Intermediate understand of a scripting programming language (e.g., Python, JavaScript), web development and statistics. -- Hardware: Access to a computer with internet connectivity -- API access to scrape specific journal websites, you may need to obtain explicit permission from the website administrators or owners. - -## Resources from Arm and our partners - -- Learning path: [Deploy MariaDB on Arm servers](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/)) -- Learning path: [Learn how to deploy PostgresSQL](https://learn.arm.com/learning-paths/servers-and-cloud-computing/postgresql/) -- Software Libraries: Example libraries for web scraping are [BeautifulSoup](https://pypi.org/project/beautifulsoup4/), Selenium. - - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file From 7cb4a6bc713a77fb8efafa1ccf4f9a85d5fa422c Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 11:05:28 +0000 Subject: [PATCH 87/98] Add files via upload --- docs/_posts/2025-05-30-AI-Agents.md | 9 +- .../2025-05-30-AI-Powered-Porting-Tool.md | 6 +- .../2025-05-30-AMBA-Simulator-Framework.md | 12 +- .../2025-05-30-Academic-Trends-Dashboard.md | 10 +- ...25-05-30-Architecture-Insight-Dashboard.md | 12 +- .../2025-05-30-Arduino-IDE-Windows-on-Arm.md | 12 +- ...5-05-30-Bioinformatic-Pipeline-Analysis.md | 8 +- ...30-Compliance-Ready-Smart-Camera-System.md | 6 +- .../2025-05-30-FPGA-Accellerator-with-DDR.md | 8 +- docs/_posts/2025-05-30-HPC-Algorithm.md | 9 +- ...5-05-30-Haskell-Compiler-Windows-on-Arm.md | 10 +- .../2025-05-30-Human-Centric-Robotics.md | 6 +- .../2025-05-30-LLM-Benchmark-on-Arm-Server.md | 8 +- ...-05-30-Machine-Learning-on-AWS-Graviton.md | 10 +- ...-05-30-Processor-in-the-Loop-Automotive.md | 10 +- .../2025-05-30-Quantisation-Aware-Training.md | 6 +- .../2025-05-30-R-Arm-Community-Support.md | 12 +- ...25-05-30-Real-Time-Image-Classification.md | 10 +- ...05-30-Responsible-AI-and-Yellow-Teaming.md | 6 +- ...2025-05-30-Sentiment-Analysis-Dashboard.md | 6 +- .../2025-05-30-Smart-Voice-Assistant.md | 12 +- ...05-30-SpecINT2017-benchmarking-on-Arm64.md | 8 +- .../2025-05-30-Write-A-Learning-Path.md | 10 +- docs/_posts/2025-05-30-projects.md | 2 +- ...-07-11-C-Based-Application-from-Scratch.md | 6 +- docs/_posts/2025-08-28-NPC-LLM-Runtime.md | 9 +- .../2025-11-03-Python-Porting-Challenge.md | 10 +- ...5-11-27-Always-On-AI-with-Ethos-U85-NPU.md | 270 +++++------ docs/_posts/2025-11-27-Edge-AI-On-Mobile.md | 259 ++++++----- .../2025-11-27-Ethos-U85-NPU-Applications.md | 423 +++++++++--------- ...Dev-Using-Neural-Graphics-Unreal-Engine.md | 158 +++++++ 31 files changed, 761 
insertions(+), 582 deletions(-) create mode 100644 docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md diff --git a/docs/_posts/2025-05-30-AI-Agents.md b/docs/_posts/2025-05-30-AI-Agents.md index 9e5429ed..72172e58 100644 --- a/docs/_posts/2025-05-30-AI-Agents.md +++ b/docs/_posts/2025-05-30-AI-Agents.md @@ -1,5 +1,5 @@ --- -title: AI-Powered Workflow Agent in a Sandboxed Environment +title: AI-Agents description: This self-service project builds a sandboxed AI agent on Arm hardware that harnesses appropriately sized LLMs to safely automate complex workflows—from DevOps pipelines to e-commerce tasks—demonstrating secure, efficient automation on accessible Arm platforms. subjects: - ML @@ -19,8 +19,7 @@ license: status: - Published donation: -badges: -- trending +badges: trending layout: article sidebar: nav: projects @@ -57,7 +56,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. @@ -101,7 +100,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md b/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md index 0adf1555..82c890c4 100644 --- a/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md +++ b/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md @@ -1,5 +1,5 @@ --- -title: AI-Powered Package Porting Tool for the Arm Architectures +title: AI-Powered-Porting-Tool description: This self-service project creates an AI-driven porting engine that analyzes package dependencies, auto-generates fixes, and submits pull requests—accelerating native macOS and Windows-on-Arm support for bioinformatics and R software so researchers can run demanding workflows directly on modern Arm devices. subjects: - CI-CD @@ -70,7 +70,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -118,6 +118,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md b/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md index ac62139f..3b66cc46 100644 --- a/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md +++ b/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md @@ -1,5 +1,5 @@ --- -title: AMBA Infrastructure Design and Simulation Framework +title: AMBA-Simulator-Framework description: This self-guided hardware project has you implement, simulate, and FPGA-prototype a Verilog AMBA bus—from simple APB to advanced CHI—sharpening hands-on expertise with Arm’s interconnect backbone and yielding a reusable reference design for future embedded systems. 
subjects: - Virtual Hardware @@ -22,7 +22,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Audience Electronic Engineering @@ -55,11 +55,12 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Audience Electronic Engineering @@ -92,6 +93,7 @@ Similar projects: ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md b/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md index 5a4bd9b1..b79f62bc 100644 --- a/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md +++ b/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md @@ -1,5 +1,5 @@ --- -title: Design a Dashboard that Tracks the Progression of Academic Papers on Computer Science Over Time +title: Academic-Trends-Dashboard description: This self-service project creates a web-scraping, database-driven dashboard that visualizes how computer-science research topics shift over time—helping Arm partners and chip architects align future hardware designs with emerging algorithmic trends. subjects: - Web @@ -25,7 +25,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -57,11 +57,11 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -93,6 +93,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md b/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md index e748bd70..16b56442 100644 --- a/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md +++ b/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md @@ -1,5 +1,5 @@ --- -title: Develop an Arm Architecture Insight Dashboard +title: Architecture-Insight-Dashboard description: This self-service project develops a data-rich dashboard that visualizes the popularity of Arm CPU/OS combinations and pinpoints software-stack support for specific extensions—giving developers an instant, validated view of where their workloads will run best. subjects: - Performance and Architecture @@ -25,7 +25,7 @@ layout: article sidebar: nav: projects full_description: |- - + ### Description @@ -65,11 +65,12 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ### Description @@ -109,6 +110,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md b/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md index 6c793fa1..a1e2146a 100644 --- a/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md +++ b/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md @@ -1,5 +1,5 @@ --- -title: Porting and Optimizing Arduino IDE for Windows on Arm +title: Arduino-IDE-Windows-on-Arm description: This self-service project ports and optimizes the Arduino IDE—patching its lzma-native dependency—to run natively and efficiently on Windows on Arm, giving developers hands-on experience with cross-platform builds, Arm64 performance tuning, and upstream open-source contributions. 
subjects: - Performance and Architecture @@ -19,14 +19,14 @@ publication-date: 2025-05-30 license: status: - Published +donation: badges: - trending -donation: layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -71,11 +71,11 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -120,6 +120,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md b/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md index 5a065749..66ef2921 100644 --- a/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md +++ b/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md @@ -1,5 +1,5 @@ --- -title: Benchmarking Bioconda Packages for Arm64 in Bioinformatics Pipelines +title: Bioinformatic-Pipeline-Analysis description: This self-service project benchmarks Arm64 Bioconda packages in real nf-core workflows—measuring performance, diagnosing build failures, and proposing fixes that accelerate truly native bioinformatics on the expanding fleet of Arm-powered machines. subjects: - Performance and Architecture @@ -69,7 +69,8 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -119,6 +120,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md b/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md index 85a3c4be..316c87a8 100644 --- a/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md +++ b/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md @@ -1,5 +1,5 @@ --- -title: End-to-End Computer Vision System for Functional Safety +title: Compliance-Ready-Smart-Camera-System description: This challenge will create and validate an Arm-based, smart camera pipeline on virtual automotive hardware—advancing safer, more developer-friendly driver-monitoring solutions for next-generation vehicles. subjects: - Security @@ -59,7 +59,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -92,6 +92,6 @@ Deliverables include: ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md b/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md index 8139ce0e..4890eb01 100644 --- a/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md +++ b/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md @@ -1,5 +1,5 @@ --- -title: Linux Capable SoC FPGA Prototyping Platform with DDR Memory +title: FPGA-Accellerator-with-DDR description: This self-service project takes Arm Corstone-1000 from FPGA to silicon, delivering a DDR-backed, Linux-ready SoC platform that lets researchers plug in and evaluate custom accelerators with real-world performance. subjects: - Virtual Hardware @@ -44,7 +44,8 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -69,6 +70,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-HPC-Algorithm.md b/docs/_posts/2025-05-30-HPC-Algorithm.md index cc1e89c4..3445c23e 100644 --- a/docs/_posts/2025-05-30-HPC-Algorithm.md +++ b/docs/_posts/2025-05-30-HPC-Algorithm.md @@ -1,5 +1,5 @@ --- -title: Optimise Performance of an Algorithm Used in High-Performance Compute Using Scalable Vector Extensions (SVE / SVE2) +title: HPC-Algorithm description: This self-service project is around finding a HPC algorithm and accelerating it with Arm’s SVE/SVE2 vectorization—demonstrating how next-generation Arm hardware can deliver significant, scalable performance gains. subjects: - Performance and Architecture @@ -19,6 +19,7 @@ license: status: - Published donation: +badges: trending layout: article sidebar: nav: projects @@ -53,7 +54,8 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -87,6 +89,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md b/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md index 971f3f70..60ed1e4a 100644 --- a/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md +++ b/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md @@ -1,5 +1,5 @@ --- -title: Adding Windows on Arm Support to the Glasgow Haskell Compiler (GHC) +title: Haskell-Compiler-Windows-on-Arm description: This self-service project brings native Glasgow Haskell Compiler support to Windows on Arm—unlocking efficient Arm-laptop builds, extending Haskell’s reach, and giving contributors hands-on experience with Arm64 code generation and runtime integration. subjects: - Migration to Arm @@ -24,7 +24,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -70,11 +70,11 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -120,6 +120,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Human-Centric-Robotics.md b/docs/_posts/2025-05-30-Human-Centric-Robotics.md index eaed709d..c06979f0 100644 --- a/docs/_posts/2025-05-30-Human-Centric-Robotics.md +++ b/docs/_posts/2025-05-30-Human-Centric-Robotics.md @@ -1,5 +1,5 @@ --- -title: Human-Centric Robotics – Urban Deployment & Socioeconomic Modelling +title: Human-Centric-Robotics description: This team project will build and test an Arm-based urban service robot—merging real-time navigation, vision-guided manipulation, and human interaction—and model its socioeconomic impact to show how Arm platforms can transform last-mile delivery, eldercare, or other city services. 
subjects: - ML @@ -66,7 +66,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -108,6 +108,6 @@ Team size: 2+ participants ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md b/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md index e3490410..7c87bffe 100644 --- a/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md +++ b/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md @@ -1,5 +1,5 @@ --- -title: LLM Benchmark for Arm Server +title: LLM-Benchmark-on-Arm-Server description: This self-service project sets up a reproducible MLPerf Inference workflow to benchmark large-language-model performance across Arm server configurations—yielding hard data that guides optimization of Arm hardware and software stacks for AI workloads. subjects: - ML @@ -46,7 +46,8 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -72,6 +73,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md b/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md index ba62a04e..5aedb7ef 100644 --- a/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md +++ b/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md @@ -1,5 +1,5 @@ --- -title: Efficient Inference of text-to-video (OpenSora) on AWS Graviton Instances +title: Machine-Learning-on-AWS-Graviton description: This self-service project ports and tunes OpenSora text-to-video transformers on AWS Graviton CPUs—showcasing cost-efficient, quantized, CPU-only inference pipelines and guiding best-practice optimization for Arm-based cloud AI workloads. subjects: - ML @@ -24,7 +24,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -62,12 +62,12 @@ full_description: |- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). ## Benefits + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -105,7 +105,7 @@ The aim of this project is to port, benchmark, and optimize a pre-trained ViT mo This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). ## Benefits +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md b/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md index 2a6236f6..9cc92a5b 100644 --- a/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md +++ b/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md @@ -1,5 +1,5 @@ --- -title: Processor in the Loop Automotive Controller on an Arm Cortex M7 Fast Model +title: Processor-in-the-Loop-Automotive description: Verify a Simulink automotive controller by running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor. subjects: - Embedded Linux @@ -26,7 +26,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -69,11 +69,11 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -116,6 +116,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. 
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Quantisation-Aware-Training.md b/docs/_posts/2025-05-30-Quantisation-Aware-Training.md index 98140faa..66cfd281 100644 --- a/docs/_posts/2025-05-30-Quantisation-Aware-Training.md +++ b/docs/_posts/2025-05-30-Quantisation-Aware-Training.md @@ -1,5 +1,5 @@ --- -title: 'Quantization-Aware Training for Mobile Deployment: Deploying Lightweight Models on Arm' +title: Quantisation-Aware-Training description: This self-service project applies PyTorch quantization-aware training to compress and accelerate vision models for Arm-powered Android devices—enabling real-time, on-device AI while sharing the resulting lightweight models with the Hugging Face community. subjects: - ML @@ -61,7 +61,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -101,6 +101,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-R-Arm-Community-Support.md b/docs/_posts/2025-05-30-R-Arm-Community-Support.md index 7bd739c5..3061387b 100644 --- a/docs/_posts/2025-05-30-R-Arm-Community-Support.md +++ b/docs/_posts/2025-05-30-R-Arm-Community-Support.md @@ -1,5 +1,5 @@ --- -title: Improving R Support for the Windows on Arm Community +title: R-Arm-Community-Support description: This self-service project boosts the R ecosystem on Windows on Arm by identifying unsupported packages, upstreaming fixes, and automating builds—so data scientists can run their workflows natively on fast, efficient Arm64 laptops and desktops. 
subjects: - Performance and Architecture @@ -23,7 +23,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -76,11 +76,12 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -133,6 +134,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-Real-Time-Image-Classification.md b/docs/_posts/2025-05-30-Real-Time-Image-Classification.md index 3658d146..19478c90 100644 --- a/docs/_posts/2025-05-30-Real-Time-Image-Classification.md +++ b/docs/_posts/2025-05-30-Real-Time-Image-Classification.md @@ -1,5 +1,5 @@ --- -title: Running Real-Time Image Classification on Arm Cortex-M with CMSIS-NN +title: Real-Time-Image-Classification description: This self-service project trains, quantizes, and CMSIS-NN-deploys a CNN to achieve real-time image classification on an Arm Cortex-M board—demonstrating low-power, edge-ready AI on microcontrollers. subjects: - ML @@ -25,7 +25,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -57,11 +57,11 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -93,6 +93,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md b/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md index 10b60ace..f356ec62 100644 --- a/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md +++ b/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md @@ -1,5 +1,5 @@ --- -title: Responsible AI and Yellow Teaming +title: Responsible-AI-and-Yellow-Teaming description: This self-service project equips teams with a YellowTeamGPT workflow that probes Arm-based AI products for unintended impacts—turning responsible-AI stress-testing into a core step of the development cycle. subjects: - ML @@ -79,7 +79,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -139,6 +139,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md b/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md index e2e1e2f8..1f910af8 100644 --- a/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md +++ b/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md @@ -1,5 +1,5 @@ --- -title: Create a Sentiment Analysis Dashboard for Keywords Based on the Semiconductor Industry +title: Sentiment-Analysis-Dashboard description: This self-service project builds a web-scraping, LLM-powered dashboard that tracks and visualizes sentiment trends across semiconductor-industry news, giving stakeholders a real-time pulse on market mood and emerging themes. subjects: - ML @@ -51,7 +51,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -80,6 +80,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-Smart-Voice-Assistant.md b/docs/_posts/2025-05-30-Smart-Voice-Assistant.md index 438f47c9..a3394f30 100644 --- a/docs/_posts/2025-05-30-Smart-Voice-Assistant.md +++ b/docs/_posts/2025-05-30-Smart-Voice-Assistant.md @@ -1,5 +1,5 @@ --- -title: Smart Voice Assistant Using TinyML on Cortex-M55 +title: Smart-Voice-Assistant description: This project trains and deploys a TinyML keyword-spotting model on an Arm Cortex-M55/U55 board to create a low-power voice assistant that recognizes spoken commands and quantifies its accuracy, latency, and energy use. subjects: - ML @@ -24,7 +24,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -53,11 +53,12 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -86,6 +87,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. 
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md b/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md index f54a5f93..01f66357 100644 --- a/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md +++ b/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md @@ -1,5 +1,5 @@ --- -title: 'SpecINT2017 Benchmarking on Arm64: Evaluating Compiler and Workload Performance' +title: SpecINT2017-benchmarking-on-Arm64 description: This self-service project profiles SPEC CPU2017 on Arm64 servers—using GCC, Clang, and Arm Compiler with top-down analysis—to reveal how compiler choices and Arm micro-architectural features impact execution time, energy efficiency, and performance bottlenecks. subjects: - Performance and Architecture @@ -70,7 +70,8 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -119,6 +120,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Write-A-Learning-Path.md b/docs/_posts/2025-05-30-Write-A-Learning-Path.md index a14f7673..b372f0b5 100644 --- a/docs/_posts/2025-05-30-Write-A-Learning-Path.md +++ b/docs/_posts/2025-05-30-Write-A-Learning-Path.md @@ -1,5 +1,5 @@ --- -title: Write an Educational Tutorial (Learning Path) of your Choice +title: Write-A-Learning-Path description: This project lets students turn their Arm expertise into a publish-ready Learning Path—creating a structured, hands-on tutorial that guides others through a complete, hardware-friendly build and showcases the author’s teaching skills. subjects: - Libraries @@ -29,7 +29,7 @@ layout: article sidebar: nav: projects full_description: |- - + ## Description @@ -50,11 +50,11 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ## Description @@ -75,6 +75,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-projects.md b/docs/_posts/2025-05-30-projects.md index 530bb581..fede046c 100644 --- a/docs/_posts/2025-05-30-projects.md +++ b/docs/_posts/2025-05-30-projects.md @@ -1,5 +1,5 @@ --- -title: 'Find industry relevant challenges on Arm ' +title: projects filter: project publication-date: 2025-05-30 layout: article diff --git a/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md b/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md index 2b1069d7..e9988d3a 100644 --- a/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md +++ b/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md @@ -1,5 +1,5 @@ --- -title: Create a minimal C-Based Project for Raspberry Pi +title: C-Based-Application-from-Scratch description: This self-service project goes back to the fundamentals. The challenge is to develop an application of your choice but you are only permitted to use the C language with as few dependencies as possible. subjects: - Performance and Architecture @@ -54,7 +54,7 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
@@ -94,7 +94,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/docs/_posts/2025-08-28-NPC-LLM-Runtime.md b/docs/_posts/2025-08-28-NPC-LLM-Runtime.md index 0bd5b22e..b2124229 100644 --- a/docs/_posts/2025-08-28-NPC-LLM-Runtime.md +++ b/docs/_posts/2025-08-28-NPC-LLM-Runtime.md @@ -1,5 +1,5 @@ --- -title: On-Device LLMs for Real-Time NPC Interaction in Games +title: NPC-LLM-Runtime description: This self-service project explores novel ways of integrating Large Language Models (LLMs) into real-time gameplay to drive dynamic Non-Playable Character (NPC) interactions. subjects: - ML @@ -19,6 +19,7 @@ publication-date: 2025-08-28 license: status: - Published +badges: trending donation: layout: article sidebar: @@ -67,7 +68,8 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- @@ -114,6 +116,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-03-Python-Porting-Challenge.md b/docs/_posts/2025-11-03-Python-Porting-Challenge.md index 9d9dedf7..238998f6 100644 --- a/docs/_posts/2025-11-03-Python-Porting-Challenge.md +++ b/docs/_posts/2025-11-03-Python-Porting-Challenge.md @@ -1,5 +1,5 @@ --- -title: Python Package Porting Challenge +title: Python-Porting-Challenge description: This challenge focuses on enabling Python support for Windows on Arm (WoA) to improve developer experience. While Python is widely used in research and industry, many popular packages—such as Pandas—still lack pre-built WoA binaries (win_arm64 wheels). 
The goal is to validate and optimise third-party packages, fix compatibility issues, and collaborate with maintainers to upstream WoA support. subjects: - Libraries @@ -21,7 +21,7 @@ layout: article sidebar: nav: projects full_description: |- - + ### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility* @@ -65,11 +65,11 @@ full_description: |- ## Benefits - Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. --- - + ### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility* @@ -113,6 +113,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md b/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md index f236fec1..27efe0e4 100644 --- a/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md +++ b/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md @@ -1,136 +1,136 @@ ---- -title: 'Edge AI with NPU: always-on-AI with ExecuTorch on Cortex-M55 + Ethos-U85 → Cortex-A' -description: The vision of Edge AI compute is to embed low-power intelligent sensing, perception, and decision systems everywhere. A low-power always-on-AI island continuously monitors sensory inputs to detect triggers. When a trigger is detected, it wakes up a more capable processor to carry out high-value inference, interaction, or control tasks. 
-subjects: -- ML -- Performance and Architecture -- Embedded Linux -- RTOS Fundamentals -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why is this important?** - - The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy: - - A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies. - - When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance. - - This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources. - - **Project Summary** - - Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements: - - 1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers. - 2. 
A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference. - 3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring). - - *Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. For LLM inference, consider connecting a Raspberry Pi 5 or similar.* - - Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices. - - You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved. - - Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve? - - ## What will you use? - You should either be familiar with, or willing to learn about, the following: - - Programming: Python, C++, Embedded C - - ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA. - - Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. 
Yocto) or Raspberry Pi OS - - - ## Resources from Arm and our partners - - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) - - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) - - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description - -**Why is this important?** - -The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. 
Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy: -- A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies. -- When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance. - -This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources. - -**Project Summary** - -Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements: - -1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers. -2. A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference. -3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring). - -*Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. 
For LLM inference, consider connecting a Raspberry Pi 5 or similar.* - -Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices. - -You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved. - -Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve? - -## What will you use? -You should either be familiar with, or willing to learn about, the following: -- Programming: Python, C++, Embedded C -- ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA. -- Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. Yocto) or Raspberry Pi OS - - -## Resources from Arm and our partners -- Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) -- Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) -- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) -- Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) -- PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. 
We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. - - +--- +title: Always-On-AI-with-Ethos-U85-NPU +description: The vision of Edge AI compute is to embed low-power intelligent sensing, perception, and decision systems everywhere. A low-power always-on-AI island continuously monitors sensory inputs to detect triggers. When a trigger is detected, it wakes up a more capable processor to carry out high-value inference, interaction, or control tasks. +subjects: +- ML +- Performance and Architecture +- Embedded Linux +- RTOS Fundamentals +requires-team: +- No +platform: +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-11-27 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + + **Why is this important?** + + The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy: + - A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies. + - When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance. 
+ + This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources. + + **Project Summary** + + Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements: + + 1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers. + 2. A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference. + 3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring). + + *Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. For LLM inference, consider connecting a Raspberry Pi 5 or similar.* + + Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices. + + You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved. + + Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve? 
+ + ## What will you use? + You should either be familiar with, or willing to learn about, the following: + - Programming: Python, C++, Embedded C + - ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA. + - Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. Yocto) or Raspberry Pi OS + + + ## Resources from Arm and our partners + - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) + - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) + - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) + - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) + - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+--- + + + +## Description + +**Why is this important?** + +The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy: +- A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies. +- When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance. + +This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources. + +**Project Summary** + +Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements: + +1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers. +2. A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference. +3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). 
Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring). + +*Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. For LLM inference, consider connecting a Raspberry Pi 5 or similar.* + +Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices. + +You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved. + +Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve? + +## What will you use? +You should either be familiar with, or willing to learn about, the following: +- Programming: Python, C++, Embedded C +- ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA. +- Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. 
Yocto) or Raspberry Pi OS + + +## Resources from Arm and our partners +- Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) +- Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) +- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) +- Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) +- PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md b/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md index 86e72c22..458a8f1c 100644 --- a/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md +++ b/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md @@ -1,131 +1,130 @@ ---- -title: 'SME2 on vivo X300: Mobile Edge AI Projects for multi-modal inference, built on Arm Lumex' -description: Leverage the latest SME2 (Scalable Matrix Extension 2) available on the newest vivo X300 smartphones (built on Arm Lumex CSS) for advanced image/video, audio and text processing edge AI. 
Explore how SME2, via KleidiAI, enables larger matrix workloads, higher throughput, and novel applications on device without cloud connectivity required. -subjects: -- ML -- Performance and Architecture -- Libraries -requires-team: -- No -platform: -- Mobile, Graphics, and Gaming -- AI -- IoT -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - ### Why is this important? - - SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device. - - [SME2](https://www.arm.com/technologies/sme2) - - The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases. - - [vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones) - - ### Project Summary - - Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2. 
- - Example project areas: - - Real-time video semantic segmentation (e.g., background removal + AR compositing) - - Live object detection + natural-language description (text summary of what the camera sees) - - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition - - On-device lightweight LLM or encoder-only transformer processing for mobile assistants - - Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case. - - Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2. 
- - --- - - ## Resources from Arm and our partners - - - Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile) - - Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/) - - Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/) - - Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/) - - Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm) - - --- - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. - - --- ---- -## Description - -### Why is this important? - -SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device. - -[SME2](https://www.arm.com/technologies/sme2) - -The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases. - -[vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones) - -### Project Summary - -Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2. - -Example project areas: - - Real-time video semantic segmentation (e.g., background removal + AR compositing) - - Live object detection + natural-language description (text summary of what the camera sees) - - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition - - On-device lightweight LLM or encoder-only transformer processing for mobile assistants - -Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). 
Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case. - -Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2. - ---- - -## Resources from Arm and our partners - -- Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile) -- Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/) -- Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/) -- Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/) -- Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/) -- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) -- Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm) - ---- - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). 
- -## Benefits - -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. - +--- +title: Edge-AI-On-Mobile +description: Leverage the latest SME2 (Scalable Matrix Extension 2) available on the newest vivo X300 smartphones (built on Arm Lumex CSS) for advanced image/video, audio and text processing edge AI. Explore how SME2, via KleidiAI, enables larger matrix workloads, higher throughput, and novel applications on device without cloud connectivity required. +subjects: +- ML +- Performance and Architecture +- Libraries +requires-team: +- No +platform: +- Mobile, Graphics, and Gaming +- AI +- IoT +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-11-27 +license: +status: +- Published +badges: trending +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + ### Why is this important? + + SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device. 
+ + [SME2](https://www.arm.com/technologies/sme2) + + The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases. + + [vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones) + + ### Project Summary + + Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2. + + Example project areas: + - Real-time video semantic segmentation (e.g., background removal + AR compositing) + - Live object detection + natural-language description (text summary of what the camera sees) + - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition + - On-device lightweight LLM or encoder-only transformer processing for mobile assistants + + Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case. + + Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2. 
+ + --- + + ## Resources from Arm and our partners + + - Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile) + - Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/) + - Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/) + - Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/) + - Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/) + - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) + - Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm) + + --- + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+ + --- +--- +## Description + +### Why is this important? + +SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device. + +[SME2](https://www.arm.com/technologies/sme2) + +The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases. + +[vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones) + +### Project Summary + +Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2. + +Example project areas: + - Real-time video semantic segmentation (e.g., background removal + AR compositing) + - Live object detection + natural-language description (text summary of what the camera sees) + - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition + - On-device lightweight LLM or encoder-only transformer processing for mobile assistants + +Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. 
Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case. + +Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2. + +--- + +## Resources from Arm and our partners + +- Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile) +- Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/) +- Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/) +- Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/) +- Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/) +- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) +- Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm) + +--- + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + --- \ No newline at end of file diff --git a/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md b/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md index 08fc13d1..a620c800 100644 --- a/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md +++ b/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md @@ -1,212 +1,213 @@ ---- -title: 'Ethos-U85 NPU Applications with TOSA Model Explorer: Exploring Next-Gen Edge AI Inference' -description: Push the limits of Edge AI by deploying the heaviest inference applications possible on Ethos-U85. Students will explore transformer-based and TOSA-optimized workloads that demonstrate performance levels on the next-gen of Ethos NPUs. -subjects: -- ML -- Performance and Architecture -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Description - - **Why is this important?** - - The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices. - - This project challenges you to explore the boundaries of what’s possible on Ethos-U85. 
The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85. - - [Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85) - - **Project Summary** - - Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities. - - Your project should include: - - 1. Model Deployment and Optimization - Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using: - - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance. - - The Vela compiler for optimization. - - These tools can be used to: - - Convert and visualize model graphs in TOSA format. - - Identify unsupported operators. - - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting. - - Run Vela for optimized compilation targeting Ethos-U85. - - 2. Application Demonstration - Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include: - - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification). - - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks. - - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding. - - 3. Analysis and Benchmarking - Report quantitative results on: - - Inference latency, throughput (FPS or tokens/s), and memory footprint. - - Power efficiency under load (optional). 
- - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits). - - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion. - - --- - - ## What kind of projects should you target? - - To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria: - - - Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models. - - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms. - - High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput. - - *Example:* 512×512 semantic segmentation or multi-object detection. - - Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance. - - *Example:* large MLP heads or transformer token mixers. - - Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations. - - The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies. - - --- - - ## What will you use? - You should be familiar with, or willing to learn about: - - Programming: Python, C/C++ - - ExecuTorch or TensorFlow Lite (Micro/LiteRT) - - Techniques for optimising AI models for the edge (quantization, pruning, etc.) 
- - Optimization Tools: - - TOSA Model Explorer - - .tflite to .tosa converter (if using Tensorflow rather than ExecuTorch) - - Vela compiler for Ethos-U - - Bare-metal or RTOS (e.g., Zephyr) - - --- - - ## Resources from Arm and our partners - - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) - - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) - - Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model) - - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) - --- - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---- - - -## Description - -**Why is this important?** - -The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices. - -This project challenges you to explore the boundaries of what’s possible on Ethos-U85. The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85. - -[Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85) - -**Project Summary** - -Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities. - -Your project should include: - -1. Model Deployment and Optimization - Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using: - - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance. - - The Vela compiler for optimization. - - These tools can be used to: - - Convert and visualize model graphs in TOSA format. - - Identify unsupported operators. - - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting. - - Run Vela for optimized compilation targeting Ethos-U85. - -2. Application Demonstration - Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include: - - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification). 
- - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks. - - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding. - -3. Analysis and Benchmarking - Report quantitative results on: - - Inference latency, throughput (FPS or tokens/s), and memory footprint. - - Power efficiency under load (optional). - - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits). - - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion. - ---- - -## What kind of projects should you target? - -To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria: - -- Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models. - - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms. -- High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput. - - *Example:* 512×512 semantic segmentation or multi-object detection. -- Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance. - - *Example:* large MLP heads or transformer token mixers. -- Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations. - -The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies. 
- ---- - -## What will you use? -You should be familiar with, or willing to learn about: -- Programming: Python, C/C++ -- ExecuTorch or TensorFlow Lite (Micro/LiteRT) -- Techniques for optimising AI models for the edge (quantization, pruning, etc.) -- Optimization Tools: - - TOSA Model Explorer - - .tflite to .tosa converter (if using Tensorflow rather than ExecuTorch) - - Vela compiler for Ethos-U -- Bare-metal or RTOS (e.g., Zephyr) - ---- - -## Resources from Arm and our partners -- Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) -- Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) -- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) -- Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) -- Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model) -- PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) ---- - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. - - +--- +title: Ethos-U85-NPU-Applications +description: Push the limits of Edge AI by deploying the heaviest inference applications possible on Ethos-U85. 
Students will explore transformer-based and TOSA-optimized workloads that demonstrate performance levels on the next-gen of Ethos NPUs. +subjects: +- ML +- Performance and Architecture +requires-team: +- No +platform: +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-11-27 +license: +status: +- Published +badges: trending +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + ## Description + + **Why is this important?** + + The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices. + + This project challenges you to explore the boundaries of what’s possible on Ethos-U85. The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85. + + [Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85) + + **Project Summary** + + Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities. + + Your project should include: + + 1. Model Deployment and Optimization + Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using: + - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance. + - The Vela compiler for optimization. 
+ + These tools can be used to: + - Convert and visualize model graphs in TOSA format. + - Identify unsupported operators. + - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting. + - Run Vela for optimized compilation targeting Ethos-U85. + + 2. Application Demonstration + Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include: + - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification). + - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks. + - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding. + + 3. Analysis and Benchmarking + Report quantitative results on: + - Inference latency, throughput (FPS or tokens/s), and memory footprint. + - Power efficiency under load (optional). + - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits). + - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion. + + --- + + ## What kind of projects should you target? + + To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria: + + - Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models. + - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms. + - High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput. + - *Example:* 512×512 semantic segmentation or multi-object detection. 
+ - Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance. + - *Example:* large MLP heads or transformer token mixers. + - Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations. + + The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies. + + --- + + ## What will you use? + You should be familiar with, or willing to learn about: + - Programming: Python, C/C++ + - ExecuTorch or TensorFlow Lite (Micro/LiteRT) + - Techniques for optimising AI models for the edge (quantization, pruning, etc.) + - Optimization Tools: + - TOSA Model Explorer + - .tflite to .tosa converter (if using Tensorflow rather than ExecuTorch) + - Vela compiler for Ethos-U + - Bare-metal or RTOS (e.g., Zephyr) + + --- + + ## Resources from Arm and our partners + - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) + - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) + - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) + - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) + - Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model) + - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) + --- + + ## Support Level + + This project is designed to be self-serve but comes with opportunity 
of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + +## Description + +**Why is this important?** + +The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices. + +This project challenges you to explore the boundaries of what’s possible on Ethos-U85. The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85. + +[Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85) + +**Project Summary** + +Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities. + +Your project should include: + +1. 
Model Deployment and Optimization + Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using: + - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance. + - The Vela compiler for optimization. + + These tools can be used to: + - Convert and visualize model graphs in TOSA format. + - Identify unsupported operators. + - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting. + - Run Vela for optimized compilation targeting Ethos-U85. + +2. Application Demonstration + Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include: + - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification). + - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks. + - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding. + +3. Analysis and Benchmarking + Report quantitative results on: + - Inference latency, throughput (FPS or tokens/s), and memory footprint. + - Power efficiency under load (optional). + - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits). + - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion. + +--- + +## What kind of projects should you target? + +To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria: + +- Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models. + - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms. 
+- High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput. + - *Example:* 512×512 semantic segmentation or multi-object detection. +- Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance. + - *Example:* large MLP heads or transformer token mixers. +- Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations. + +The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies. + +--- + +## What will you use? +You should be familiar with, or willing to learn about: +- Programming: Python, C/C++ +- ExecuTorch or TensorFlow Lite (Micro/LiteRT) +- Techniques for optimising AI models for the edge (quantization, pruning, etc.) 
+- Optimization Tools:
+  - TOSA Model Explorer
+  - .tflite to .tosa converter (if using Tensorflow rather than ExecuTorch)
+  - Vela compiler for Ethos-U
+- Bare-metal or RTOS (e.g., Zephyr)
+
+---
+
+## Resources from Arm and our partners
+- Arm Developer: [Edge AI](https://developer.arm.com/edge-ai)
+- Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/)
+- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
+- Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/)
+- Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model)
+- PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/)
+---
+
+## Support Level
+
+This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally, if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file diff --git a/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md b/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md new file mode 100644 index 00000000..58bcca09 --- /dev/null +++ b/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md @@ -0,0 +1,158 @@ +--- +title: Game-Dev-Using-Neural-Graphics-&-Unreal-Engine +description: Build a playable Unreal Engine 5 game demo that utilises Arm’s Neural Graphics SDK UE plugin for features such as Neural Super Sampling (NSS). Showcase near-identical image quality at lower resolution by driving neural rendering directly in the graphics pipeline. +subjects: +- ML +- Gaming +- Libraries +- Graphics +requires-team: +- No +platform: +- Mobile, Graphics, and Gaming +- Laptops and Desktops +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-11-27 +license: +status: +- Published +badges: trending +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + ## Description + + ### Why is this important? + + Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation. + + Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face. + + [Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology) + + Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process. 
+ + Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released. + + ### Project Summary + + Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate: + - **Near-identical visuals at lower resolution** (render low → upscale with NSS) + + Document your progress and findings and consider alternative applications of the neural technology within games development. + + Attempt different environments and objects. For example: + + - Daytime vs night + - Urban city, jungle forest, ocean floor, alien planet, building interiors + - Complex lighting and shadows + - NPCs with detailed clothing, faces, hair. Include animations. + + Make your scenes dynamic with particle effects, shadows, physics and motion. + + --- + + ## Pre-requisites + - Laptop/PC/Mobile for Android Unreal Engine game development + - Willingness to learn about games development and graphics, and the increasing use of AI in these fields. 
+ + --- + + ## Resources from Arm and partners + - Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling) + - Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works) + - Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics) + - Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/) + - Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/) + - Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/) + - Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine) + - Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines) + - Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym) + - Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/) + + --- + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
+ + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + +## Description + +### Why is this important? + +Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation. + +Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face. + +[Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology) + +Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process. + +Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released. + +### Project Summary + +Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate: +- **Near-identical visuals at lower resolution** (render low → upscale with NSS) + +Document your progress and findings and consider alternative applications of the neural technology within games development. + +Attempt different environments and objects. For example: + +- Daytime vs night +- Urban city, jungle forest, ocean floor, alien planet, building interiors +- Complex lighting and shadows +- NPCs with detailed clothing, faces, hair. Include animations. 
+ +Make your scenes dynamic with particle effects, shadows, physics and motion. + +--- + +## Pre-requisites +- Laptop/PC/Mobile for Android Unreal Engine game development +- Willingness to learn about games development and graphics, and the increasing use of AI in these fields. + +--- + +## Resources from Arm and partners +- Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling) +- Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works) +- Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics) +- Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/) +- Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/) +- Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/) +- Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine) +- Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines) +- Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym) +- Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/) + +--- + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file From 1092cb6cd211ba04bb5c6d7beff8ea70b7678267 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 11:49:27 +0000 Subject: [PATCH 88/98] Delete docs/_posts directory --- docs/_posts/2025-05-30-AI-Agents.md | 113 ---------- .../2025-05-30-AI-Powered-Porting-Tool.md | 123 ---------- .../2025-05-30-AMBA-Simulator-Framework.md | 99 -------- .../2025-05-30-Academic-Trends-Dashboard.md | 98 -------- ...25-05-30-Architecture-Insight-Dashboard.md | 116 ---------- .../2025-05-30-Arduino-IDE-Windows-on-Arm.md | 125 ---------- ...5-05-30-Bioinformatic-Pipeline-Analysis.md | 126 ----------- ...30-Compliance-Ready-Smart-Camera-System.md | 97 -------- .../2025-05-30-FPGA-Accellerator-with-DDR.md | 76 ------- docs/_posts/2025-05-30-HPC-Algorithm.md | 95 -------- ...5-05-30-Haskell-Compiler-Windows-on-Arm.md | 125 ---------- .../2025-05-30-Human-Centric-Robotics.md | 113 ---------- .../2025-05-30-LLM-Benchmark-on-Arm-Server.md | 79 ------- ...-05-30-Machine-Learning-on-AWS-Graviton.md | 111 --------- ...-05-30-Processor-in-the-Loop-Automotive.md | 121 ---------- .../2025-05-30-Quantisation-Aware-Training.md | 106 --------- .../2025-05-30-R-Arm-Community-Support.md | 140 ------------ ...25-05-30-Real-Time-Image-Classification.md | 98 -------- ...05-30-Responsible-AI-and-Yellow-Teaming.md | 144 ------------ 
...2025-05-30-Sentiment-Analysis-Dashboard.md | 85 ------- .../2025-05-30-Smart-Voice-Assistant.md | 93 -------- ...05-30-SpecINT2017-benchmarking-on-Arm64.md | 126 ----------- .../2025-05-30-Write-A-Learning-Path.md | 80 ------- docs/_posts/2025-05-30-ai-agents.md | 113 ---------- .../2025-05-30-ai-powered-porting-tool.md | 123 ---------- .../2025-05-30-amba-simulator-framework.md | 99 -------- ...25-05-30-architecture-insight-dashboard.md | 116 ---------- .../2025-05-30-arduino-ide-windows-on-arm.md | 125 ---------- ...5-05-30-bioinformatic-pipeline-analysis.md | 126 ----------- ...30-compliance-ready-smart-camera-system.md | 97 -------- .../2025-05-30-fpga-accellerator-with-ddr.md | 76 ------- ...5-05-30-haskell-compiler-windows-on-arm.md | 125 ---------- docs/_posts/2025-05-30-hpc-algorithm.md | 95 -------- .../2025-05-30-human-centric-robotics.md | 113 ---------- .../2025-05-30-llm-benchmark-on-arm-server.md | 79 ------- ...-05-30-machine-learning-on-aws-graviton.md | 111 --------- ...-05-30-processor-in-the-loop-automotive.md | 121 ---------- docs/_posts/2025-05-30-projects.md | 13 -- .../2025-05-30-quantisation-aware-training.md | 106 --------- .../2025-05-30-r-arm-community-support.md | 140 ------------ ...25-05-30-real-time-image-classification.md | 98 -------- ...05-30-responsible-ai-and-yellow-teaming.md | 144 ------------ ...2025-05-30-sentiment-analysis-dashboard.md | 85 ------- .../2025-05-30-smart-voice-assistant.md | 93 -------- ...05-30-specint2017-benchmarking-on-arm64.md | 126 ----------- .../2025-05-30-write-a-learning-path.md | 80 ------- ...-07-11-C-Based-Application-from-Scratch.md | 103 --------- ...-07-11-c-based-application-from-scratch.md | 103 --------- docs/_posts/2025-08-28-NPC-LLM-Runtime.md | 122 ---------- docs/_posts/2025-08-28-npc-llm-runtime.md | 122 ---------- .../2025-11-03-Python-Porting-Challenge.md | 118 ---------- .../2025-11-03-python-porting-challenge.md | 118 ---------- ...5-11-27-Always-On-AI-with-Ethos-U85-NPU.md | 136 
----------- docs/_posts/2025-11-27-Edge-AI-On-Mobile.md | 130 ----------- .../2025-11-27-Ethos-U85-NPU-Applications.md | 213 ------------------ ...v-Using-Neural-Graphics-&-Unreal-Engine.md | 157 ------------- ...Dev-Using-Neural-Graphics-Unreal-Engine.md | 158 ------------- ...5-11-27-always-on-ai-with-ethos-u85-npu.md | 136 ----------- docs/_posts/2025-11-27-edge-ai-on-mobile.md | 130 ----------- .../2025-11-27-ethos-u85-npu-applications.md | 213 ------------------ ...v-using-neural-graphics---unreal-engine.md | 158 ------------- 61 files changed, 7010 deletions(-) delete mode 100644 docs/_posts/2025-05-30-AI-Agents.md delete mode 100644 docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md delete mode 100644 docs/_posts/2025-05-30-AMBA-Simulator-Framework.md delete mode 100644 docs/_posts/2025-05-30-Academic-Trends-Dashboard.md delete mode 100644 docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md delete mode 100644 docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md delete mode 100644 docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md delete mode 100644 docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md delete mode 100644 docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md delete mode 100644 docs/_posts/2025-05-30-HPC-Algorithm.md delete mode 100644 docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md delete mode 100644 docs/_posts/2025-05-30-Human-Centric-Robotics.md delete mode 100644 docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md delete mode 100644 docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md delete mode 100644 docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md delete mode 100644 docs/_posts/2025-05-30-Quantisation-Aware-Training.md delete mode 100644 docs/_posts/2025-05-30-R-Arm-Community-Support.md delete mode 100644 docs/_posts/2025-05-30-Real-Time-Image-Classification.md delete mode 100644 docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md delete mode 100644 
docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md delete mode 100644 docs/_posts/2025-05-30-Smart-Voice-Assistant.md delete mode 100644 docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md delete mode 100644 docs/_posts/2025-05-30-Write-A-Learning-Path.md delete mode 100644 docs/_posts/2025-05-30-ai-agents.md delete mode 100644 docs/_posts/2025-05-30-ai-powered-porting-tool.md delete mode 100644 docs/_posts/2025-05-30-amba-simulator-framework.md delete mode 100644 docs/_posts/2025-05-30-architecture-insight-dashboard.md delete mode 100644 docs/_posts/2025-05-30-arduino-ide-windows-on-arm.md delete mode 100644 docs/_posts/2025-05-30-bioinformatic-pipeline-analysis.md delete mode 100644 docs/_posts/2025-05-30-compliance-ready-smart-camera-system.md delete mode 100644 docs/_posts/2025-05-30-fpga-accellerator-with-ddr.md delete mode 100644 docs/_posts/2025-05-30-haskell-compiler-windows-on-arm.md delete mode 100644 docs/_posts/2025-05-30-hpc-algorithm.md delete mode 100644 docs/_posts/2025-05-30-human-centric-robotics.md delete mode 100644 docs/_posts/2025-05-30-llm-benchmark-on-arm-server.md delete mode 100644 docs/_posts/2025-05-30-machine-learning-on-aws-graviton.md delete mode 100644 docs/_posts/2025-05-30-processor-in-the-loop-automotive.md delete mode 100644 docs/_posts/2025-05-30-projects.md delete mode 100644 docs/_posts/2025-05-30-quantisation-aware-training.md delete mode 100644 docs/_posts/2025-05-30-r-arm-community-support.md delete mode 100644 docs/_posts/2025-05-30-real-time-image-classification.md delete mode 100644 docs/_posts/2025-05-30-responsible-ai-and-yellow-teaming.md delete mode 100644 docs/_posts/2025-05-30-sentiment-analysis-dashboard.md delete mode 100644 docs/_posts/2025-05-30-smart-voice-assistant.md delete mode 100644 docs/_posts/2025-05-30-specint2017-benchmarking-on-arm64.md delete mode 100644 docs/_posts/2025-05-30-write-a-learning-path.md delete mode 100644 docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md delete 
mode 100644 docs/_posts/2025-07-11-c-based-application-from-scratch.md delete mode 100644 docs/_posts/2025-08-28-NPC-LLM-Runtime.md delete mode 100644 docs/_posts/2025-08-28-npc-llm-runtime.md delete mode 100644 docs/_posts/2025-11-03-Python-Porting-Challenge.md delete mode 100644 docs/_posts/2025-11-03-python-porting-challenge.md delete mode 100644 docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md delete mode 100644 docs/_posts/2025-11-27-Edge-AI-On-Mobile.md delete mode 100644 docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md delete mode 100644 docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md delete mode 100644 docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md delete mode 100644 docs/_posts/2025-11-27-always-on-ai-with-ethos-u85-npu.md delete mode 100644 docs/_posts/2025-11-27-edge-ai-on-mobile.md delete mode 100644 docs/_posts/2025-11-27-ethos-u85-npu-applications.md delete mode 100644 docs/_posts/2025-11-27-game-dev-using-neural-graphics---unreal-engine.md diff --git a/docs/_posts/2025-05-30-AI-Agents.md b/docs/_posts/2025-05-30-AI-Agents.md deleted file mode 100644 index 72172e58..00000000 --- a/docs/_posts/2025-05-30-AI-Agents.md +++ /dev/null @@ -1,113 +0,0 @@ ---- -title: AI-Agents -description: This self-service project builds a sandboxed AI agent on Arm hardware that harnesses appropriately sized LLMs to safely automate complex workflows—from DevOps pipelines to e-commerce tasks—demonstrating secure, efficient automation on accessible Arm platforms. 
-subjects: -- ML -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -badges: trending -layout: article -sidebar: - nav: projects -full_description: |- - ### Description - - **Why this is important?** - - AI Agents enhance large language models (LLMs) by performing user-driven actions, enabling various commercial applications. This is a nascent domain will emerging frameworks such as the model context protocol (MCP) leading to commercial products and services. The Arm architecture, from microcontrollers to servers, will be used to carry out agentic functions and Arm has many initatives to support the AI future. See [our website for more details](https://www.arm.com/markets/artificial-intelligence). - - **Project Summary** - - Participants must develop an AI-powered agent that automates repetitive and complex workflow tasks in a specific domain, such as software development, e-commerice, or DevOps. The foundational model can be a suitable model of your choice (e.g., [OpenAI API](https://openai.com/api/)) but you must consider the appropriate model for cost, reliability and accessibility. Additionally, you are free to choose the tools for agent functionality, such as [LLama-cpp-agent](https://github.com/Maximilian-Winter/llama-cpp-agent). One stipulatation, is that the LLM and/or agent must run on an Arm-based system, such as a Google Pixel phone or Arm-based server. - - The AI agent will be deployed in a sandboxed environment to ensure safety and prevent unintended consequences, including prompt guardrails - - ## Prerequisites - - - Intermediate understanding in an OOP language such as Python (for front-end, if needed). - - Familiarity using Databases such as PostgreSQL, MongoDB, VectorDB. 
- - Access to a LLM (e.g., through an API or on-device LLM) - - Optional API access to target workflow tools such as Jira, Jenkins etc. - - - ## Resources from Arm and our partners - - - Learning path: [Deploy and MCP Server on a Raspberry Pi5 for AI Agent Interaction](https://learn.arm.com/learning-paths/cross-platform/mcp-ai-agent/) - - - Learning path: [Deploy an AI Agent on Arm with llama.cpp and llama-cpp-agent](https://learn.arm.com/learning-paths/servers-and-cloud-computing/ai-agent-on-cpu/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. - - ### Previous Submissions - 1. [AI to Solve Maths Example Sheets at University of Cambridge. (Finley Stirk, Eliyahu Gluschove-Koppel and Ronak De)](https://github.com/egkoppel/example-papers) - - 2. [AI that interprets user requests, generates circuit descriptions, creates LTSpice ASC code, and iteratively refines circuit designs using a combination of GPT-based language models, a vision analysis module, and LTSpice simulation. (Gijeong Lee, Bill Leoutsakos)](https://github.com/BillLeoutsakosvl346/ElectroNinjaRefined) - - - 3. 
[AI agent to track real-time student engagement and exam performance (Jasper Wang, Sritej Tummuru, Talha Javed)](https://github.com/JasperWANG-911/AI_Agent) ---- -### Description - -**Why this is important?** - -AI Agents enhance large language models (LLMs) by performing user-driven actions, enabling various commercial applications. This is a nascent domain will emerging frameworks such as the model context protocol (MCP) leading to commercial products and services. The Arm architecture, from microcontrollers to servers, will be used to carry out agentic functions and Arm has many initatives to support the AI future. See [our website for more details](https://www.arm.com/markets/artificial-intelligence). - -**Project Summary** - -Participants must develop an AI-powered agent that automates repetitive and complex workflow tasks in a specific domain, such as software development, e-commerice, or DevOps. The foundational model can be a suitable model of your choice (e.g., [OpenAI API](https://openai.com/api/)) but you must consider the appropriate model for cost, reliability and accessibility. Additionally, you are free to choose the tools for agent functionality, such as [LLama-cpp-agent](https://github.com/Maximilian-Winter/llama-cpp-agent). One stipulatation, is that the LLM and/or agent must run on an Arm-based system, such as a Google Pixel phone or Arm-based server. - -The AI agent will be deployed in a sandboxed environment to ensure safety and prevent unintended consequences, including prompt guardrails - -## Prerequisites - -- Intermediate understanding in an OOP language such as Python (for front-end, if needed). -- Familiarity using Databases such as PostgreSQL, MongoDB, VectorDB. -- Access to a LLM (e.g., through an API or on-device LLM) -- Optional API access to target workflow tools such as Jira, Jenkins etc. 
- - -## Resources from Arm and our partners - -- Learning path: [Deploy and MCP Server on a Raspberry Pi5 for AI Agent Interaction](https://learn.arm.com/learning-paths/cross-platform/mcp-ai-agent/) - -- Learning path: [Deploy an AI Agent on Arm with llama.cpp and llama-cpp-agent](https://learn.arm.com/learning-paths/servers-and-cloud-computing/ai-agent-on-cpu/) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. - -### Previous Submissions -1. [AI to Solve Maths Example Sheets at University of Cambridge. (Finley Stirk, Eliyahu Gluschove-Koppel and Ronak De)](https://github.com/egkoppel/example-papers) - -2. [AI that interprets user requests, generates circuit descriptions, creates LTSpice ASC code, and iteratively refines circuit designs using a combination of GPT-based language models, a vision analysis module, and LTSpice simulation. (Gijeong Lee, Bill Leoutsakos)](https://github.com/BillLeoutsakosvl346/ElectroNinjaRefined) - - -3. 
[AI agent to track real-time student engagement and exam performance (Jasper Wang, Sritej Tummuru, Talha Javed)](https://github.com/JasperWANG-911/AI_Agent) \ No newline at end of file diff --git a/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md b/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md deleted file mode 100644 index 82c890c4..00000000 --- a/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md +++ /dev/null @@ -1,123 +0,0 @@ ---- -title: AI-Powered-Porting-Tool -description: This self-service project creates an AI-driven porting engine that analyzes package dependencies, auto-generates fixes, and submits pull requests—accelerating native macOS and Windows-on-Arm support for bioinformatics and R software so researchers can run demanding workflows directly on modern Arm devices. -subjects: -- CI-CD -- ML -- Migration to Arm -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - **Why this is important?** - - Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). 
While many leading Bioconda packages now support Linux/Arm, there remains a gap in native macOS and Windows on Arm support, as numerous packages default to emulated x86 environments. Additionally, the R community faces challenges with Windows-on-Arm support for community-created packages, with many unable to build due to x86-specific code issues. - - **Project Summary** - - This project challenges you to build an intelligent automation tool for porting software packages — for use in domains such as [bioinformatic pipelines with Nextflow](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/Bioinformatic-Pipeline-Analysis.md) or [statistics with R](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/R-Arm-Community-Support.md). - - Given the large number of community packages, applying manual patches is not only time-consuming but also inefficient, as many involve similar, repetitive adjustments—highlighting the need for a scalable, automated solution. - The goal is to build a sophisticated system (beyond simple shell scripts) that uses dependency graph analysis, machine learning, to: - - - Identify unported packages - - Trace recursive dependency issues - - Recommend or auto-generate build recipes and steps - - Evaluate build success and reattempt intelligently - - Generate pull requests when confident of a fix. - - For complex packages, offer guidance to developers on how to port them—for example, by suggesting tools like SSE2NEON for translating x86 SSE intrinsics. - - Be extensible to work with various packaging systems and languages - - This project is a blend of automation, machine learning, and systems programming. The outcome could directly contribute to open source ecosystems and help bring cutting-edge bioinformatics tools to wider hardware audiences. - - ## Prerequisites - - - Access to Apple Silicon or Windows on Arm machine. 
- - Familiarity with Python, Bash and Nextflow - - Familiar with genomics/bioinformatics or statistics with the R language. - - Experience or willing to learn nf-core pipelines, Conda, BioConda and Docker/Singularity. - - - ## Resources from Arm and our partners - - - External Resource: [Example Porting Script for Bioconda](https://github.com/dslarm/bioconda-contrib-notes/tree/main), [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) and [Bioconda package repository](https://bioconda.github.io/) - - Documentation: [nf-core documentation](https://nf-co.re/docs/) - - External Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/), Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages - - Dataset: Example [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -## Description - -**Why this is important?** - -Bioconda is a specialized package repository for bioinformatics and genomics. 
Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). While many leading Bioconda packages now support Linux/Arm, there remains a gap in native macOS and Windows on Arm support, as numerous packages default to emulated x86 environments. Additionally, the R community faces challenges with Windows-on-Arm support for community-created packages, with many unable to build due to x86-specific code issues. - -**Project Summary** - -This project challenges you to build an intelligent automation tool for porting software packages — for use in domains such as [bioinformatic pipelines with Nextflow](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/Bioinformatic-Pipeline-Analysis.md) or [statistics with R](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/R-Arm-Community-Support.md). - -Given the large number of community packages, applying manual patches is not only time-consuming but also inefficient, as many involve similar, repetitive adjustments—highlighting the need for a scalable, automated solution. -The goal is to build a sophisticated system (beyond simple shell scripts) that uses dependency graph analysis, machine learning, to: - -- Identify unported packages -- Trace recursive dependency issues -- Recommend or auto-generate build recipes and steps -- Evaluate build success and reattempt intelligently -- Generate pull requests when confident of a fix. 
-- For complex packages, offer guidance to developers on how to port them—for example, by suggesting tools like SSE2NEON for translating x86 SSE intrinsics. -- Be extensible to work with various packaging systems and languages - -This project is a blend of automation, machine learning, and systems programming. The outcome could directly contribute to open source ecosystems and help bring cutting-edge bioinformatics tools to wider hardware audiences. - -## Prerequisites - -- Access to Apple Silicon or Windows on Arm machine. -- Familiarity with Python, Bash and Nextflow -- Familiar with genomics/bioinformatics or statistics with the R language. -- Experience or willing to learn nf-core pipelines, Conda, BioConda and Docker/Singularity. - - -## Resources from Arm and our partners - -- External Resource: [Example Porting Script for Bioconda](https://github.com/dslarm/bioconda-contrib-notes/tree/main), [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) and [Bioconda package repository](https://bioconda.github.io/) -- Documentation: [nf-core documentation](https://nf-co.re/docs/) -- External Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/), Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages -- Dataset: Example [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md b/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md deleted file mode 100644 index 3b66cc46..00000000 --- a/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md +++ /dev/null @@ -1,99 +0,0 @@ ---- -title: AMBA-Simulator-Framework -description: This self-guided hardware project has you implement, simulate, and FPGA-prototype a Verilog AMBA bus—from simple APB to advanced CHI—sharpening hands-on expertise with Arm’s interconnect backbone and yielding a reusable reference design for future embedded systems. -subjects: -- Virtual Hardware -- Performance and Architecture -requires-team: -- No -platform: -- Embedded and Microcontrollers -sw-hw: -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Audience - Electronic Engineering - - ## Description - This project aims to develop a reference design of AMBA (Advanced Microcontroller Bus Architecture) infrastructure. You are free to choose which AMBA protocol and version of the interconnect standard to implement with more simple specifications e.g., APB, being easier to create than coherent protocols such as AMBA CHI. - - The main deliverables include the Verilog design of the AMBA infrastructure, a Verilog test bench for testing the design, an RTL (Register Transfer Level) simulation flow to verify the functionality, and an FPGA prototyping platform to demonstrate the design in a real-world environment. 
The project will provide a comprehensive understanding of AMBA protocols and their implementation, making it an excellent learning opportunity for students interested in digital design and hardware description languages. - - ## Prequisites - - - Intermediate understanding of Verilog, SystemVerilog or other hardware description languages (HDL). - - Access and basic understanding of ModelSim, Quartus and Vivado - - Access to a suitable FPGA development board (e.g., Xilinx or Altera), simulation tools - - ## Resources from Arm and our partners - - - - Video: [Introductory Video to AMBA](https://www.youtube.com/watch?v=zayyWwSxyW4) - - Documentation: [AMBA Interconnect Specifications](https://www.arm.com/architecture/system-architectures/amba/amba-specifications) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Previous Submissions - - Similar projects: - - https://github.com/kumarraj5364/AMBA-APB-PROTOCOL - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - -## Audience -Electronic Engineering - -## Description -This project aims to develop a reference design of AMBA (Advanced Microcontroller Bus Architecture) infrastructure. 
You are free to choose which AMBA protocol and version of the interconnect standard to implement with more simple specifications e.g., APB, being easier to create than coherent protocols such as AMBA CHI. - -The main deliverables include the Verilog design of the AMBA infrastructure, a Verilog test bench for testing the design, an RTL (Register Transfer Level) simulation flow to verify the functionality, and an FPGA prototyping platform to demonstrate the design in a real-world environment. The project will provide a comprehensive understanding of AMBA protocols and their implementation, making it an excellent learning opportunity for students interested in digital design and hardware description languages. - -## Prequisites - -- Intermediate understanding of Verilog, SystemVerilog or other hardware description languages (HDL). -- Access and basic understanding of ModelSim, Quartus and Vivado -- Access to a suitable FPGA development board (e.g., Xilinx or Altera), simulation tools - -## Resources from Arm and our partners - - -- Video: [Introductory Video to AMBA](https://www.youtube.com/watch?v=zayyWwSxyW4) -- Documentation: [AMBA Interconnect Specifications](https://www.arm.com/architecture/system-architectures/amba/amba-specifications) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Previous Submissions - -Similar projects: - - https://github.com/kumarraj5364/AMBA-APB-PROTOCOL - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md b/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md deleted file mode 100644 index b79f62bc..00000000 --- a/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md +++ /dev/null @@ -1,98 +0,0 @@ ---- -title: Academic-Trends-Dashboard -description: This self-service project creates a web-scraping, database-driven dashboard that visualizes how computer-science research topics shift over time—helping Arm partners and chip architects align future hardware designs with emerging algorithmic trends. -subjects: -- Web -- Databases -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Description - - **Why this is important?** - - The field of computer science research is continually evolving, with new algorithms shaping the design of future hardware. This project aims to guide computer architecture decisions, ensuring that upcoming hardware aligns with the needs of software algorithms and applications. The dashboard you create can be of use to Arm partners manufacturing physical chips and used to guide architecture decisions. - - **Project Summary** - - The main deliverable is a web scraping tool that pulls keywords from academic papers, considering the popularity of the paper and its publication. The data will be stored in an appropriate database and displayed in a web browser format, allowing users to visualize trends and changes in research focus over time. 
This project will provide practical experience in using APIs, web scraping, and data analysis. Some academic search engines to consider are Google Scholar, [BASE](https://www.base-search.net/), [Core](https://core.ac.uk/) and [Science.gov](https://science.gov/). - - - ## Prequisites - - - Software: Intermediate understand of a scripting programming language (e.g., Python, JavaScript), web development and statistics. - - Hardware: Access to a computer with internet connectivity - - API access to scrape specific journal websites, you may need to obtain explicit permission from the website administrators or owners. - - ## Resources from Arm and our partners - - - Learning path: [Deploy MariaDB on Arm servers](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/)) - - Learning path: [Learn how to deploy PostgresSQL](https://learn.arm.com/learning-paths/servers-and-cloud-computing/postgresql/) - - Software Libraries: Example libraries for web scraping are [BeautifulSoup](https://pypi.org/project/beautifulsoup4/), Selenium. - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---- - - -## Description - -**Why this is important?** - -The field of computer science research is continually evolving, with new algorithms shaping the design of future hardware. This project aims to guide computer architecture decisions, ensuring that upcoming hardware aligns with the needs of software algorithms and applications. The dashboard you create can be of use to Arm partners manufacturing physical chips and used to guide architecture decisions. - -**Project Summary** - -The main deliverable is a web scraping tool that pulls keywords from academic papers, considering the popularity of the paper and its publication. The data will be stored in an appropriate database and displayed in a web browser format, allowing users to visualize trends and changes in research focus over time. This project will provide practical experience in using APIs, web scraping, and data analysis. Some academic search engines to consider are Google Scholar, [BASE](https://www.base-search.net/), [Core](https://core.ac.uk/) and [Science.gov](https://science.gov/). - - -## Prequisites - -- Software: Intermediate understand of a scripting programming language (e.g., Python, JavaScript), web development and statistics. -- Hardware: Access to a computer with internet connectivity -- API access to scrape specific journal websites, you may need to obtain explicit permission from the website administrators or owners. - -## Resources from Arm and our partners - -- Learning path: [Deploy MariaDB on Arm servers](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/)) -- Learning path: [Learn how to deploy PostgresSQL](https://learn.arm.com/learning-paths/servers-and-cloud-computing/postgresql/) -- Software Libraries: Example libraries for web scraping are [BeautifulSoup](https://pypi.org/project/beautifulsoup4/), Selenium. 
- - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md b/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md deleted file mode 100644 index 16b56442..00000000 --- a/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md +++ /dev/null @@ -1,116 +0,0 @@ ---- -title: Architecture-Insight-Dashboard -description: This self-service project develops a data-rich dashboard that visualizes the popularity of Arm CPU/OS combinations and pinpoints software-stack support for specific extensions—giving developers an instant, validated view of where their workloads will run best. -subjects: -- Performance and Architecture -- Web -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ### Description - - **Why this is important?** - - Developers often face challenges in selecting the appropriate platform for their software. 
With numerous smartphones and cloud instances available, gauging consumer popularity and availability can be difficult, and identifying software stack dependencies can be time-consuming. As Arm anticipates an increase in Arm-based products in the coming years, this situation is likely to become even more complex, requiring the need for a single, validated solution. - - **Project Summary** - - This project aims to develop a comprehensive dashboard that lets a developer know what proportion of devices support a specific Arm CPU extension, similar to [“Can I use”](https://caniuse.com/) for web development and any software compatibility issues. The functional requirements for the Architecture Insights dashboard: - - - Popularity of Arm architectures and Operating System combinations over time - - Searchable index of software, libraries and tools that have been optimised for a specific architecture. For example, "Does the video processing software, FFMPEG, support acceleration for SVE2 with Windows 11?" - - - Students will gain hands-on experience with data visualization, statistical analysis, web development, and market analysis, providing valuable insights into the Arm ecosystem. - - ## Prequisites - - You are free to explore your own implementation. The skills below are examples. - - - Intemediate understanding of an OOP language such as Python or JavaScript - - Access to a computer with internet connectivity - - - ## Resources from Arm and our partners - - - Website: [Arm Software Ecosystem Dashboard](https://www.arm.com/developer-hub/ecosystem-dashboard) - - Website: [Windows on Arm Support Wiki page](https://linaro.atlassian.net/wiki/spaces/WOAR/overview) - - Website: ["Can I Use?" dashboard](https://caniuse.com/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -### Description - -**Why this is important?** - -Developers often face challenges in selecting the appropriate platform for their software. With numerous smartphones and cloud instances available, gauging consumer popularity and availability can be difficult, and identifying software stack dependencies can be time-consuming. As Arm anticipates an increase in Arm-based products in the coming years, this situation is likely to become even more complex, requiring the need for a single, validated solution. - -**Project Summary** - -This project aims to develop a comprehensive dashboard that lets a developer know what proportion of devices support a specific Arm CPU extension, similar to [“Can I use”](https://caniuse.com/) for web development and any software compatibility issues. The functional requirements for the Architecture Insights dashboard: - -- Popularity of Arm architectures and Operating System combinations over time -- Searchable index of software, libraries and tools that have been optimised for a specific architecture. For example, "Does the video processing software, FFMPEG, support acceleration for SVE2 with Windows 11?" - - -Students will gain hands-on experience with data visualization, statistical analysis, web development, and market analysis, providing valuable insights into the Arm ecosystem. 
- -## Prequisites - -You are free to explore your own implementation. The skills below are examples. - -- Intemediate understanding of an OOP language such as Python or JavaScript -- Access to a computer with internet connectivity - - -## Resources from Arm and our partners - -- Website: [Arm Software Ecosystem Dashboard](https://www.arm.com/developer-hub/ecosystem-dashboard) -- Website: [Windows on Arm Support Wiki page](https://linaro.atlassian.net/wiki/spaces/WOAR/overview) -- Website: ["Can I Use?" dashboard](https://caniuse.com/) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md b/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md deleted file mode 100644 index a1e2146a..00000000 --- a/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md +++ /dev/null @@ -1,125 +0,0 @@ ---- -title: Arduino-IDE-Windows-on-Arm -description: This self-service project ports and optimizes the Arduino IDE—patching its lzma-native dependency—to run natively and efficiently on Windows on Arm, giving developers hands-on experience with cross-platform builds, Arm64 performance tuning, and upstream open-source contributions. 
-subjects: -- Performance and Architecture -- Migration to Arm -- Libraries -requires-team: -- No -platform: -- Laptops and Desktops -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -badges: -- trending -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why this is important?** - - Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, consumers will expect both performance and battery efficiency across all WoA applications, such as the ArduinoIDE. - - **Project summary** - - This project focuses on **porting and optimising the Arduino IDE**—an essential open-source platform for embedded development to run natively and efficiently on Windows on Arm platforms. In addition, the project tackles a key dependency, `lzma-native`, a compression library used by the IDE, which currently [**lacks support for Windows on Arm**](https://github.com/addaleax/lzma-native/issues/132) Previous attempts to build `lzma-native` on WoA failed due to architecture-specific compilation issues and native module bindings (`node-gyp`, `liblzma`, etc.). - - ### Key Objectives: - - Successfully build and run the [Arduino IDE](https://github.com/arduino/arduino-ide) on Windows on Arm. - - Patch or fork [`lzma-native`](https://github.com/addaleax/lzma-native) to enable full compatibility on WoA. - - Benchmark IDE performance and memory usage on Arm64 vs. x64 emulation. - - Submit upstream patches and document issues to support long-term ecosystem health. - - This project aligns strongly with Arm’s mission to expand native software compatibility on Arm-based Windows devices. 
It provides students with a **deep dive into cross-platform development, native module compilation, and Arm architecture optimization**, making it ideal for CV building, community contribution, and real-world system-level experience. - - ## Prequisites - - - - Familiarity with JavaScript (Node.js), TypeScript and C++ (lzma-native) - - Familiarity or willing to learn `CMake`, `Ninja`, `Visual Studio with C++ Desktop Dev`, UTM - - Basic understandig of terminal programs such as `Windows Terminal`, `PowerShell` and `WSL2` - - Access to a physical Windows on Arm device or a [WoA Virtual Machine running through UTM](https://mac.getutm.app/gallery/windows-11-arm). see the [Linaro Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) for more information. - - - ## Resources from Arm and our partners - - - Repository: [Arduino IDE GitHub repo](https://github.com/arduino/arduino-ide) - - Repository: [lzma-native GitHub repo](https://github.com/addaleax/lzma-native) - - External Documentation: [Issue #132 – lzma-native Windows Arm64 build failure](https://github.com/addaleax/lzma-native/issues/132) - - Documentation: Arm’s official [Learn on Arm](https://learn.arm.com/) platform - - External Documentation: [Windows on Arm Environments – Linaro wiki](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) - - Documentation: [Node.js native addon guides](https://nodejs.org/api/addons.html) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description - -**Why this is important?** - -Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, consumers will expect both performance and battery efficiency across all WoA applications, such as the ArduinoIDE. - -**Project summary** - -This project focuses on **porting and optimising the Arduino IDE**—an essential open-source platform for embedded development to run natively and efficiently on Windows on Arm platforms. In addition, the project tackles a key dependency, `lzma-native`, a compression library used by the IDE, which currently [**lacks support for Windows on Arm**](https://github.com/addaleax/lzma-native/issues/132) Previous attempts to build `lzma-native` on WoA failed due to architecture-specific compilation issues and native module bindings (`node-gyp`, `liblzma`, etc.). - -### Key Objectives: -- Successfully build and run the [Arduino IDE](https://github.com/arduino/arduino-ide) on Windows on Arm. -- Patch or fork [`lzma-native`](https://github.com/addaleax/lzma-native) to enable full compatibility on WoA. -- Benchmark IDE performance and memory usage on Arm64 vs. x64 emulation. -- Submit upstream patches and document issues to support long-term ecosystem health. - -This project aligns strongly with Arm’s mission to expand native software compatibility on Arm-based Windows devices. 
It provides students with a **deep dive into cross-platform development, native module compilation, and Arm architecture optimization**, making it ideal for CV building, community contribution, and real-world system-level experience. - -## Prequisites - - -- Familiarity with JavaScript (Node.js), TypeScript and C++ (lzma-native) -- Familiarity or willing to learn `CMake`, `Ninja`, `Visual Studio with C++ Desktop Dev`, UTM -- Basic understandig of terminal programs such as `Windows Terminal`, `PowerShell` and `WSL2` -- Access to a physical Windows on Arm device or a [WoA Virtual Machine running through UTM](https://mac.getutm.app/gallery/windows-11-arm). see the [Linaro Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) for more information. - - -## Resources from Arm and our partners - -- Repository: [Arduino IDE GitHub repo](https://github.com/arduino/arduino-ide) -- Repository: [lzma-native GitHub repo](https://github.com/addaleax/lzma-native) -- External Documentation: [Issue #132 – lzma-native Windows Arm64 build failure](https://github.com/addaleax/lzma-native/issues/132) -- Documentation: Arm’s official [Learn on Arm](https://learn.arm.com/) platform -- External Documentation: [Windows on Arm Environments – Linaro wiki](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) -- Documentation: [Node.js native addon guides](https://nodejs.org/api/addons.html) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md b/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md deleted file mode 100644 index 66ef2921..00000000 --- a/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md +++ /dev/null @@ -1,126 +0,0 @@ ---- -title: Bioinformatic-Pipeline-Analysis -description: This self-service project benchmarks Arm64 Bioconda packages in real nf-core workflows—measuring performance, diagnosing build failures, and proposing fixes that accelerate truly native bioinformatics on the expanding fleet of Arm-powered machines. -subjects: -- Performance and Architecture -- Databases -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ### Description - - **Why this is important?** - - Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops running Linux and MacOS. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). 
While many leading Bioconda packages now support Linux/Arm, there still emulated components that can be the bottleneck. - - **Project summary** - - This project aims to benchmark specific Bioconda packages that have been built for Arm64 using the nf-core-arm-discovery repository. The participant will utilize public genomic datasets from databases such as NCBI, select appropriate datasets, and execute bioinformatics workflows on Arm-based infrastructure. The candidate will evaluate the performance, compatibility, and efficiency of these packages, document errors and failures, and investigate the reasons behind package build failures. The final deliverable will be a detailed report with performance metrics, identified issues, and recommended improvements to enhance package support on Arm64. - - The deliverables of the project are as follows: - - - Selection and justification of public genomic datasets. - - Execution of bioinformatics workflows using Bioconda packages on Arm64. - - Performance benchmarking and comparison with x86 architectures. - - Documentation of failed package builds and proposed fixes. - - Comprehensive report with results, analysis, and recommendations. - - - ## Prequisites - - - Intermediate understanding of Python, Bash and nextflow - - Basic experience with nf-core pipelines, Conda, Docker/Singularity, Snakemake - - Access to Arm64-based cloud instances (e.g., AWS Graviton) with plenty of memory and storage - - IP access to Public genomic databases (NCBI, ENA, etc.) 
- - ## Resources from Arm and our partners - - - External Documentation: [nf-core documentation](https://nf-co.re/docs/) - - - External Documentation: [AWS Graviton documentation](https://aws.amazon.com/ec2/graviton/) - - - Repository: [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) - - - Repository: [Bioconda package repository](https://bioconda.github.io/) - - - Dataset: [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -### Description - -**Why this is important?** - -Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops running Linux and MacOS. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). 
While many leading Bioconda packages now support Linux/Arm, there still emulated components that can be the bottleneck. - -**Project summary** - -This project aims to benchmark specific Bioconda packages that have been built for Arm64 using the nf-core-arm-discovery repository. The participant will utilize public genomic datasets from databases such as NCBI, select appropriate datasets, and execute bioinformatics workflows on Arm-based infrastructure. The candidate will evaluate the performance, compatibility, and efficiency of these packages, document errors and failures, and investigate the reasons behind package build failures. The final deliverable will be a detailed report with performance metrics, identified issues, and recommended improvements to enhance package support on Arm64. - -The deliverables of the project are as follows: - -- Selection and justification of public genomic datasets. -- Execution of bioinformatics workflows using Bioconda packages on Arm64. -- Performance benchmarking and comparison with x86 architectures. -- Documentation of failed package builds and proposed fixes. -- Comprehensive report with results, analysis, and recommendations. - - -## Prequisites - -- Intermediate understanding of Python, Bash and nextflow -- Basic experience with nf-core pipelines, Conda, Docker/Singularity, Snakemake -- Access to Arm64-based cloud instances (e.g., AWS Graviton) with plenty of memory and storage -- IP access to Public genomic databases (NCBI, ENA, etc.) 
- -## Resources from Arm and our partners - -- External Documentation: [nf-core documentation](https://nf-co.re/docs/) - -- External Documentation: [AWS Graviton documentation](https://aws.amazon.com/ec2/graviton/) - -- Repository: [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) - -- Repository: [Bioconda package repository](https://bioconda.github.io/) - -- Dataset: [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) - - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md b/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md deleted file mode 100644 index 316c87a8..00000000 --- a/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md +++ /dev/null @@ -1,97 +0,0 @@ ---- -title: Compliance-Ready-Smart-Camera-System -description: This challenge will create and validate an Arm-based, smart camera pipeline on virtual automotive hardware—advancing safer, more developer-friendly driver-monitoring solutions for next-generation vehicles. 
-subjects: -- Security -- Embedded Linux -- ML -- Virtual Hardware -requires-team: -- Yes -platform: -- Mobile, Graphics, and Gaming -- Automotive -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -- Direct Support from Arm -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. - - - ## Description - - **Why this is important?** - - As of June 2025, [94% of global automakers](https://newsroom.arm.com/blog/arm-zena-css-ai-defined-vehicle-compute-platform) use Arm vehicle technology. The development of compliance-ready smart camera systems is crucial for Arm as it aligns with the increasing demand for safety and reliability in automotive platforms, see the [Arm Zena compute subsystem(CSS)](https://www.arm.com/products/automotive/compute-subsystems/zena) for more details. - - **Project summary** - - Government and external regulations, particularly in automotive industries, are shaping how smart camera products are designed, implemented, and verified. This project will explore how compliance with standards like ISO 26262 influences the design of smart camera systems, such as Driver Monitoring Systems (DMS) to ensure driver attentiveness. Students or engineers will collaborate with industry-relevant platforms, such as Arm's Kronos Fixed Virtual Platform (FVP) for automotive development, to understand and propose frictionless developer experiences aligned with functional safety standards. 
- - Deliverables include: - - Developing a full end-to-end smart camera system that can be incorporated into a functional safety environment (such as automotive or medical), following the relevant standards for development (e.g., ISO 26262) - - A survey of regulatory requirements and their impact on smart camera design - - An architectural analysis integrating Arm-based systems into a compliant automotive software stack - - Recommendations for enhancing developer tools and reference software stacks to align with ISO standards - - ## Estimated Project Duration - - Estimated Time: 6+ months - - Participants: Team of 2+ - - ## Resources from Arm and Arm partners - - Learning Paths - [Automotive Development](https://www.arm.com/resources/learning-paths/automotive) - - Software - Arm Automotive Reference Platforms (e.g., [Kronos FVP](https://arm-auto-solutions.docs.arm.com/en/v1.0/overview.html)) - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. - - -## Description - -**Why this is important?** - -As of June 2025, [94% of global automakers](https://newsroom.arm.com/blog/arm-zena-css-ai-defined-vehicle-compute-platform) use Arm vehicle technology. 
The development of compliance-ready smart camera systems is crucial for Arm as it aligns with the increasing demand for safety and reliability in automotive platforms, see the [Arm Zena compute subsystem(CSS)](https://www.arm.com/products/automotive/compute-subsystems/zena) for more details. - -**Project summary** - -Government and external regulations, particularly in automotive industries, are shaping how smart camera products are designed, implemented, and verified. This project will explore how compliance with standards like ISO 26262 influences the design of smart camera systems, such as Driver Monitoring Systems (DMS) to ensure driver attentiveness. Students or engineers will collaborate with industry-relevant platforms, such as Arm's Kronos Fixed Virtual Platform (FVP) for automotive development, to understand and propose frictionless developer experiences aligned with functional safety standards. - -Deliverables include: -- Developing a full end-to-end smart camera system that can be incorporated into a functional safety environment (such as automotive or medical), following the relevant standards for development (e.g., ISO 26262) -- A survey of regulatory requirements and their impact on smart camera design -- An architectural analysis integrating Arm-based systems into a compliant automotive software stack -- Recommendations for enhancing developer tools and reference software stacks to align with ISO standards - -## Estimated Project Duration -- Estimated Time: 6+ months -- Participants: Team of 2+ - -## Resources from Arm and Arm partners -- Learning Paths - [Automotive Development](https://www.arm.com/resources/learning-paths/automotive) -- Software - Arm Automotive Reference Platforms (e.g., [Kronos FVP](https://arm-auto-solutions.docs.arm.com/en/v1.0/overview.html)) - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md b/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md deleted file mode 100644 index 4890eb01..00000000 --- a/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md +++ /dev/null @@ -1,76 +0,0 @@ ---- -title: FPGA-Accellerator-with-DDR -description: This self-service project takes Arm Corstone-1000 from FPGA to silicon, delivering a DDR-backed, Linux-ready SoC platform that lets researchers plug in and evaluate custom accelerators with real-world performance. -subjects: -- Virtual Hardware -- Performance and Architecture -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -sw-hw: -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - This project aims to leverage the Corstone-1000 platform to host and support a custom research accelerator. The main deliverables include prototyping the accelerator in FPGA and then creating a physical silicon implementation using a pre-verified programmable control system. The project will provide practical experience in SoC design, FPGA prototyping, and hardware acceleration. The final output will be a functional SoC FPGA prototyping platform with DDR memory, capable of running Linux and demonstrating the feasibility and performance of the design. **To undertake this project, please reach out to SoC Labs** in [this link](https://soclabs.org/). 
## Prerequisites
## Prerequisites
-subjects: -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -badges: trending -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - **Why this is important?** - - Scalable Vector Extension (SVE) is a vector extension the A64 instruction set of the Armv8-A architecture. Armv9-A builds on SVE with the SVE2 extension. Unlike other single-instruction multiple data (SIMD) architectures, SVE and SVE2 do not define the size of the vector registers, but constrains it to a range of possible values, from a minimum of 128 bits up to a maximum of 2048 in 128-bit wide units. Therefore, any CPU vendor can implement the extension by choosing the vector register size that better suits the workloads the CPU is targeting. As of J there is growing availablity of SVE-enabled hardware, such as through cloud service providers. However, not all software has taken advantage of this feature. As such there is potential performance improvements available to software libraries and applications that add support for SVE/SVE2. - - **Project summary** - - This project aims to identify and optimize the performance of an algorithm used in high-performance computing (HPC) by leveraging Scalable Vector Extensions (SVE) instructions. The main deliverable is an optimized version of the chosen algorithm that demonstrates a performance improvements using SVE. This project will provide practical experience in HPC, vectorization, and performance optimization. The final output will be a detailed report and a functional implementation of the optimized algorithm. - - ## Prequisites - - - Intermediate undestanding of C, C++ or Fortran. - - Experience with high performance compute (HPC). 
- - Basic understanding of compilers such as Arm Compiler for HPC, or autovectorising compiler such as GCC. - - Access to Arm-based servers or SVE-enabled hardware - - ## Resources from Arm and our partners - - - Learning path: [Port Code to SVE](https://learn.arm.com/learning-paths/servers-and-cloud-computing/sve/) - - Learning path: [Migrate applications that use performance libraries](https://learn.arm.com/learning-paths/servers-and-cloud-computing/using-and-porting-performance-libs/) - - Documentation: [SVE Programmers Guide](https://developer.arm.com/documentation/102476/0101/Programming-with-SVE) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -## Description - -**Why this is important?** - -Scalable Vector Extension (SVE) is a vector extension the A64 instruction set of the Armv8-A architecture. Armv9-A builds on SVE with the SVE2 extension. Unlike other single-instruction multiple data (SIMD) architectures, SVE and SVE2 do not define the size of the vector registers, but constrains it to a range of possible values, from a minimum of 128 bits up to a maximum of 2048 in 128-bit wide units. 
Therefore, any CPU vendor can implement the extension by choosing the vector register size that better suits the workloads the CPU is targeting. As of today, there is growing availability of SVE-enabled hardware, such as through cloud service providers. However, not all software has taken advantage of this feature. As such there are potential performance improvements available to software libraries and applications that add support for SVE/SVE2.
-
-**Project summary**
-
-This project aims to identify and optimize the performance of an algorithm used in high-performance computing (HPC) by leveraging Scalable Vector Extensions (SVE) instructions. The main deliverable is an optimized version of the chosen algorithm that demonstrates performance improvements using SVE. This project will provide practical experience in HPC, vectorization, and performance optimization. The final output will be a detailed report and a functional implementation of the optimized algorithm.
-
-## Prerequisites
-
-- Intermediate understanding of C, C++ or Fortran.
-- Experience with high performance compute (HPC).
-- Basic understanding of compilers such as Arm Compiler for HPC, or autovectorising compiler such as GCC.
-- Access to Arm-based servers or SVE-enabled hardware
-
-## Resources from Arm and our partners
-
-- Learning path: [Port Code to SVE](https://learn.arm.com/learning-paths/servers-and-cloud-computing/sve/)
-- Learning path: [Migrate applications that use performance libraries](https://learn.arm.com/learning-paths/servers-and-cloud-computing/using-and-porting-performance-libs/)
-- Documentation: [SVE Programmers Guide](https://developer.arm.com/documentation/102476/0101/Programming-with-SVE)
-
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md b/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md deleted file mode 100644 index 60ed1e4a..00000000 --- a/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md +++ /dev/null @@ -1,125 +0,0 @@ ---- -title: Haskell-Compiler-Windows-on-Arm -description: This self-service project brings native Glasgow Haskell Compiler support to Windows on Arm—unlocking efficient Arm-laptop builds, extending Haskell’s reach, and giving contributors hands-on experience with Arm64 code generation and runtime integration. -subjects: -- Migration to Arm -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why this is important?** - - The Glasgow Haskell Compiler (GHC) is the de facto standard compiler for Haskell, an advanced purely functional programming language with strong type inference and lazy evaluation. 
This project aims to **port GHC to support Windows on Arm (WoA)**, a platform that is increasingly relevant with the rise of Arm-powered laptops and developer kits. Arm anticipates more original equipment manufacturers (OEMs) to be available in the coming years. - - - **Project summary** - - Currently, GHC lacks robust support for WoA, hindering Haskell’s reach in energy-efficient and mobile-native environments (Request for support has [previously been requested by the community](https://gitlab.haskell.org/ghc/ghc/-/issues/24603)). The goal is to bridge this gap by: - - Enabling native compilation of Haskell code via GHC on WoA. - - Implementing and testing architecture-specific assembly and intrinsic functions. - - Extending the GHC build system to recognize WoA environments. - - Integrating and validating linker and runtime support on Arm-based Windows systems. - - The project requires in-depth familiarity with compiler backends, calling conventions, code generation pipelines, and the use of LLVM or native code generators. Students will also gain experience in cross-compilation, Windows PE/COFF linking, and performance benchmarking on Arm CPUs. - - The work has potential for real-world deployment and academic publishing, and would be of high value to the Haskell and Arm developer ecosystems. 
- - --- - - ## Prequisites - - - Advanced understanding of Haskell (including Template Haskell, Core-to-STG pipeline understanding) - - Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) - - Intemediate understanding of Arm64 Assembly (AArch64) - - Comfortable using compilers such as LLVM and Clang for backend work (if using LLVM codegen) - - Access to MSYS2 / CMake / Ninja for Windows builds - - - ## Resources from Arm and our partners - - - External Documentation: [GHC Development Wiki](https://gitlab.haskell.org/ghc/ghc/-/wikis/) - - Repository: [GHC source tree](https://gitlab.haskell.org/ghc/ghc) - - External Documentation: [Linaro WoA Support Documentation](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description - -**Why this is important?** - -The Glasgow Haskell Compiler (GHC) is the de facto standard compiler for Haskell, an advanced purely functional programming language with strong type inference and lazy evaluation. 
This project aims to **port GHC to support Windows on Arm (WoA)**, a platform that is increasingly relevant with the rise of Arm-powered laptops and developer kits. Arm anticipates more original equipment manufacturers (OEMs) to be available in the coming years.
-
-
-**Project summary**
-
-Currently, GHC lacks robust support for WoA, hindering Haskell’s reach in energy-efficient and mobile-native environments (support has [previously been requested by the community](https://gitlab.haskell.org/ghc/ghc/-/issues/24603)). The goal is to bridge this gap by:
-- Enabling native compilation of Haskell code via GHC on WoA.
-- Implementing and testing architecture-specific assembly and intrinsic functions.
-- Extending the GHC build system to recognize WoA environments.
-- Integrating and validating linker and runtime support on Arm-based Windows systems.
-
-The project requires in-depth familiarity with compiler backends, calling conventions, code generation pipelines, and the use of LLVM or native code generators. Students will also gain experience in cross-compilation, Windows PE/COFF linking, and performance benchmarking on Arm CPUs.
-
-The work has potential for real-world deployment and academic publishing, and would be of high value to the Haskell and Arm developer ecosystems. 
-
----
-
-## Prerequisites
-
-- Advanced understanding of Haskell (including Template Haskell, Core-to-STG pipeline understanding)
-- Arm64 Windows device or access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
-- Intermediate understanding of Arm64 Assembly (AArch64)
-- Comfortable using compilers such as LLVM and Clang for backend work (if using LLVM codegen)
-- Access to MSYS2 / CMake / Ninja for Windows builds
-
-
-## Resources from Arm and our partners
-
-- External Documentation: [GHC Development Wiki](https://gitlab.haskell.org/ghc/ghc/-/wikis/)
-- Repository: [GHC source tree](https://gitlab.haskell.org/ghc/ghc)
-- External Documentation: [Linaro WoA Support Documentation](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
-
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
-
-To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-Human-Centric-Robotics.md b/docs/_posts/2025-05-30-Human-Centric-Robotics.md deleted file mode 100644 index c06979f0..00000000 --- a/docs/_posts/2025-05-30-Human-Centric-Robotics.md +++ /dev/null @@ -1,113 +0,0 @@ ---- -title: Human-Centric-Robotics -description: This team project will build and test an Arm-based urban service robot—merging real-time navigation, vision-guided manipulation, and human interaction—and model its socioeconomic impact to show how Arm platforms can transform last-mile delivery, eldercare, or other city services. -subjects: -- ML -- Embedded Linux -- RTOS Fundamentals -requires-team: -- Yes -platform: -- Automotive -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -- Direct Support from Arm -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. - - ## Description - - **Why this is important?** - - Arm is transitioning from traditional IP to providing platforms for the AI era, please see a recent [news post](https://newsroom.arm.com/news/new-arm-product-naming-architecture) for more details. This project is crucial for Arm as it showcases the versatility of Arm-based platforms in real-world applications, enhancing their relevance in the robotics sector. Furthermore, the insights gained from this project can inform future developments and partnerships and share future platforms related to robotics. - - **Project Summary** - - This project challenges students to design, build, and evaluate a human-centric robotic system for urban deployment using Arm-based compute platforms such as Raspberry Pi 5, NVIDIA Jetson Orin Nano, etc. 
The primary focus is on deploying prototypes in a controlled campus environment for applications like last-mile delivery, eldercare assistance, or smart waste collection. - - - Participants will integrate real-time navigation, object manipulation, and human-interaction modules using state-of-the-art computer vision and sensor fusion frameworks. The second phase involves simulating or evaluating the robot’s impact on urban workflows and labor markets, including surveys or socioeconomic modeling techniques (e.g., system dynamics or agent-based simulation). - - Potential Deliverables include: - - A working prototype running on an Arm-based platform - - Software stack (navigation, ML inference, interaction logic) - - Field evaluation results & UX data (e.g., survey or usage logs) - - Report of development process and considerations when prototyping an end-user product. - - A socioeconomic impact report using modeling or simulation techniques - - *Note: Arm does not offer direct channels to municipalities or public testing environments. Projects should focus on campus deployments, simulated environments (e.g., Gazebo).* - - ## Estimated Project Duration - - 6+ months - Team size: 2+ participants - - ## Prerequisites - - - **Languages**: Familiarity with an OOP language. - - **Hardware**: - - **IP/Cloud Access**: - - Any cloud service provider with Arm-based instances (for model training or data analysis) - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---- -Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. - -## Description - -**Why this is important?** - -Arm is transitioning from traditional IP to providing platforms for the AI era, please see a recent [news post](https://newsroom.arm.com/news/new-arm-product-naming-architecture) for more details. This project is crucial for Arm as it showcases the versatility of Arm-based platforms in real-world applications, enhancing their relevance in the robotics sector. Furthermore, the insights gained from this project can inform future developments and partnerships and share future platforms related to robotics. - -**Project Summary** - -This project challenges students to design, build, and evaluate a human-centric robotic system for urban deployment using Arm-based compute platforms such as Raspberry Pi 5, NVIDIA Jetson Orin Nano, etc. The primary focus is on deploying prototypes in a controlled campus environment for applications like last-mile delivery, eldercare assistance, or smart waste collection. - - -Participants will integrate real-time navigation, object manipulation, and human-interaction modules using state-of-the-art computer vision and sensor fusion frameworks. The second phase involves simulating or evaluating the robot’s impact on urban workflows and labor markets, including surveys or socioeconomic modeling techniques (e.g., system dynamics or agent-based simulation). - -Potential Deliverables include: -- A working prototype running on an Arm-based platform -- Software stack (navigation, ML inference, interaction logic) -- Field evaluation results & UX data (e.g., survey or usage logs) -- Report of development process and considerations when prototyping an end-user product. -- A socioeconomic impact report using modeling or simulation techniques - -*Note: Arm does not offer direct channels to municipalities or public testing environments. 
Projects should focus on campus deployments, simulated environments (e.g., Gazebo).* - -## Estimated Project Duration - -6+ months -Team size: 2+ participants - -## Prerequisites - -- **Languages**: Familiarity with an OOP language. -- **Hardware**: -- **IP/Cloud Access**: - - Any cloud service provider with Arm-based instances (for model training or data analysis) - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md b/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md deleted file mode 100644 index 7c87bffe..00000000 --- a/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md +++ /dev/null @@ -1,79 +0,0 @@ ---- -title: LLM-Benchmark-on-Arm-Server -description: This self-service project sets up a reproducible MLPerf Inference workflow to benchmark large-language-model performance across Arm server configurations—yielding hard data that guides optimization of Arm hardware and software stacks for AI workloads. -subjects: -- ML -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - This project aims to benchmark inference on Arm-based servers using the MLPerf Inference benchmark suite. 
The project spans performance analysis across different configurations of Arm-based servers. The main deliverable is a comprehensive benchmarking setup that can evaluate the performance of large language models (LLMs) on various Arm server configurations in addition to a report highlighting the performance difference and how to recreate the results. This project will provide practical experience in benchmarking, performance analysis, and working with Arm-based server architectures. The final output will be a detailed report and a functional benchmarking infrastructure that can be used for further research and development. - - - ## Prequisites - - - Intermediate understanding of Python and C++ - - Intemediate understanding of ML frameworks such as MLPerf, TensorFlow and PyTorch - - Access to physcial Arm-based server or access to cloud service providers - - ## Resources from Arm and our partners - - - Repository: [MLPerf Inference ](https://github.com/mlcommons/inference) - - External Documentation: [MLPerf Inference Benchmark Suite](https://mlcommons.org/en/inference-datacenter-20/) - - Blog: [Arm Server inference performance](https://community.arm.com/arm-community-blogs/b/servers-and-cloud-computing-blog/posts/machine-learning-inference-on-aws-graviton3) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-## Description
-This project aims to benchmark inference on Arm-based servers using the MLPerf Inference benchmark suite. The project spans performance analysis across different configurations of Arm-based servers. The main deliverable is a comprehensive benchmarking setup that can evaluate the performance of large language models (LLMs) on various Arm server configurations in addition to a report highlighting the performance difference and how to recreate the results. This project will provide practical experience in benchmarking, performance analysis, and working with Arm-based server architectures. The final output will be a detailed report and a functional benchmarking infrastructure that can be used for further research and development.
-
-
-## Prerequisites
-
-- Intermediate understanding of Python and C++
-- Intermediate understanding of ML frameworks such as MLPerf, TensorFlow and PyTorch
-- Access to physical Arm-based server or access to cloud service providers
-
-## Resources from Arm and our partners
-
-- Repository: [MLPerf Inference ](https://github.com/mlcommons/inference)
-- External Documentation: [MLPerf Inference Benchmark Suite](https://mlcommons.org/en/inference-datacenter-20/)
-- Blog: [Arm Server inference performance](https://community.arm.com/arm-community-blogs/b/servers-and-cloud-computing-blog/posts/machine-learning-inference-on-aws-graviton3)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md b/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md deleted file mode 100644 index 5aedb7ef..00000000 --- a/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md +++ /dev/null @@ -1,111 +0,0 @@ ---- -title: Machine-Learning-on-AWS-Graviton -description: This self-service project ports and tunes OpenSora text-to-video transformers on AWS Graviton CPUs—showcasing cost-efficient, quantized, CPU-only inference pipelines and guiding best-practice optimization for Arm-based cloud AI workloads. -subjects: -- ML -- Migration to Arm -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Description - - **Why is this important?** - - This project investigates the deployment and optimization of text-to-video transformer models on Arm-based instances, leveraging CPU-only execution for cost-effective and scalable inference. Vision Transformers, though typically run on GPUs, are increasingly desire to operate in resource-constrained environments for power efficiency. - - - **Project Summary** - - The aim of this project is to port, benchmark, and optimize a pre-trained ViT model (e.g., OpenSora) on Arm-based instances. 
This could include post-training quantization and investigation in how to speed up performance. Students will explore efficiency techniques such as INT8 quantization, refactoring of expensive operations, and memory-efficient transformer kernels, and compare results across GPU and CPU platforms. Deliverables include a reproducable inference pipeline and a technical report outlining bottlenecks and optimization strategies. - - ## Prequisites - - - Intemediate understanding of Python. - - Understanding of transformer architectures, vision transformer architectures and inference optimization - - Experience using PyTorch or ONNX Runtime (CPU execution provider) - - Experience with libraries such as Hugging Face Transformers, torchvision - - Access to Arm-based instances such as AWS Graviton3/Graviton4 (`c7g`, `m7g`, or `r7g`) - - Familiarity with Linux, Docker, and cloud environments - - - ## Resources from Arm and our partners - - - - Learning Paths: [Arm AI Learning Paths](https://learn.arm.com/tag/ml) - - Repository: [AWS Machine Learning Guide](https://github.com/aws/aws-graviton-getting-started/tree/main/machinelearning) - - Blog: [AWS SageMaker](https://aws.amazon.com/blogs/machine-learning/run-machine-learning-inference-workloads-on-aws-graviton-based-instances-with-amazon-sagemaker/) - - External Documentation: [OpenSora Documentation](https://github.com/hpcaitech/Open-Sora) - - Repository: [GGML library](https://github.com/ggml-org/ggml) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
-
-
-  To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-## Description
-
-**Why is this important?**
-
-This project investigates the deployment and optimization of text-to-video transformer models on Arm-based instances, leveraging CPU-only execution for cost-effective and scalable inference. Vision Transformers, though typically run on GPUs, are increasingly desired to operate in resource-constrained environments for power efficiency.
-
-
-**Project Summary**
-
-The aim of this project is to port, benchmark, and optimize a pre-trained ViT model (e.g., OpenSora) on Arm-based instances. This could include post-training quantization and investigation into how to speed up performance. Students will explore efficiency techniques such as INT8 quantization, refactoring of expensive operations, and memory-efficient transformer kernels, and compare results across GPU and CPU platforms. Deliverables include a reproducible inference pipeline and a technical report outlining bottlenecks and optimization strategies.
-
-## Prerequisites
-
-- Intermediate understanding of Python. 
-- Understanding of transformer architectures, vision transformer architectures and inference optimization -- Experience using PyTorch or ONNX Runtime (CPU execution provider) -- Experience with libraries such as Hugging Face Transformers, torchvision -- Access to Arm-based instances such as AWS Graviton3/Graviton4 (`c7g`, `m7g`, or `r7g`) -- Familiarity with Linux, Docker, and cloud environments - - -## Resources from Arm and our partners - - -- Learning Paths: [Arm AI Learning Paths](https://learn.arm.com/tag/ml) -- Repository: [AWS Machine Learning Guide](https://github.com/aws/aws-graviton-getting-started/tree/main/machinelearning) -- Blog: [AWS SageMaker](https://aws.amazon.com/blogs/machine-learning/run-machine-learning-inference-workloads-on-aws-graviton-based-instances-with-amazon-sagemaker/) -- External Documentation: [OpenSora Documentation](https://github.com/hpcaitech/Open-Sora) -- Repository: [GGML library](https://github.com/ggml-org/ggml) - - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md b/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md deleted file mode 100644 index 9cc92a5b..00000000 --- a/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md +++ /dev/null @@ -1,121 +0,0 @@ ---- -title: Processor-in-the-Loop-Automotive -description: Verify a Simulink automotive controller by running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor. -subjects: -- Embedded Linux -- RTOS Fundamentals -- Virtual Hardware -requires-team: -- No -platform: -- Laptops and Desktops -- Automotive -- Embedded and Microcontrollers -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why this is important** - - Modern automotive software development requires early verification of embedded software to meet performance and safety standards. Engineers must validate control algorithms long before physical ECUs are available. Processor in the Loop (PIL) verification bridges this gap by executing auto-generated embedded C code on a virtual processor while checking for functional equivalence and timing compliance. This challenge mirrors the real-world V-Model verification process used by OEMs and Tier 1 suppliers, offering hands-on experience in model-based design, embedded code generation, and in-the-loop testing on a virtual Arm Cortex M7 core.Modern vehicles demand rigorous, early-stage verification of embedded software to satisfy safety, performance and homologation requirements. Waiting for physical ECUs delays feedback and drives up cost; virtual processor testing closes this gap. 
- - **Project summary** - - Start with a prebuilt Simulink automotive control model and drive it through a complete model-based development and verification workflow. This includes defining detailed software requirements, designing test scenarios, generating C code from the controller subsystem, running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor, analyzing execution time, and publishing a complete verification report. - - ## Prequisites - - - [MATLAB & Simulink License](https://uk.mathworks.com/pricing-licensing.html?prodcode=ML&intendeduse=edu) - - Familiarity with C/C++, Simulink, Stateflow and Embedded Coder - - Familiarity with Processor-in-the-Loop (PIL), Code Profile Analyzer - - Understanding of automotive software development such as V-Model lifecycle methodology. - - - ## Resources from Arm and our partners - - - Built-in Simulink Automotive Example: [Automatic Climate Control](https://www.mathworks.com/help/simulink/slref/simulating-automatic-climate-control-systems.html) - - Built-in Simulink Automotive Example: [Tire Pressure Monitoring System (TPMS)]( https://www.mathworks.com/help/simulink/ug/wirelesss-tire-pressure-monitoring-system-with-fault-logging.html) - - Built-in Simulink Automotive Example: [Anti-Lock Braking System (ABS)]( https://www.mathworks.com/help/simulink/slref/modeling-an-anti-lock-braking-system.html) - - Define Requirements: [Requirements Toolbox™](https://www.mathworks.com/products/requirements-toolbox.html) - - Code Generation: [MathWorks Embedded Coder®](https://uk.mathworks.com/products/embedded-coder.html) - - Model-in-the-Loop Test: [Simulink Test™](https://www.mathworks.com/help/sltest/index.html?s_tid=CRUX_lftnav) - - Measure Code Coverage: [Simulink Coverage™](https://www.mathworks.com/help/slcoverage/index.html) - - Hardware Implementation: [Arm Cortex M7 (Fast Model)](https://developer.arm.com/Tools%20and%20Software/Fast%20Models), [Embedded Coder Support Package for Arm Cortex M Fast Models]( 
https://www.mathworks.com/hardware-support/arm-cortex-m.html) - - Conduct Execution Profiling: [Code Profile Analyzer](https://www.mathworks.com/help/ecoder/ref/codeprofileanalyzer-app.html) - - Perform Static Code Analysis: [Polyspace®](https://www.mathworks.com/products/polyspace.html) - - Documentation: [MATLAB and Simulink for Verification and Validation](https://www.mathworks.com/solutions/verification-validation.html) - - Documentation: [ARM Cortex-M Support from Embedded Coder]( https://www.mathworks.com/hardware-support/arm-cortex-m.html) - - Documentation: [Arm Fast Models](https://uk.mathworks.com/products/connections/product_detail/arm-fast-models.html) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description - -**Why this is important** - -Modern automotive software development requires early verification of embedded software to meet performance and safety standards. Engineers must validate control algorithms long before physical ECUs are available. Processor in the Loop (PIL) verification bridges this gap by executing auto-generated embedded C code on a virtual processor while checking for functional equivalence and timing compliance. 
This challenge mirrors the real-world V-Model verification process used by OEMs and Tier 1 suppliers, offering hands-on experience in model-based design, embedded code generation, and in-the-loop testing on a virtual Arm Cortex M7 core. Modern vehicles demand rigorous, early-stage verification of embedded software to satisfy safety, performance and homologation requirements. Waiting for physical ECUs delays feedback and drives up cost; virtual processor testing closes this gap. - -**Project summary** - -Start with a prebuilt Simulink automotive control model and drive it through a complete model-based development and verification workflow. This includes defining detailed software requirements, designing test scenarios, generating C code from the controller subsystem, running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor, analyzing execution time, and publishing a complete verification report. - -## Prerequisites - -- [MATLAB & Simulink License](https://uk.mathworks.com/pricing-licensing.html?prodcode=ML&intendeduse=edu) -- Familiarity with C/C++, Simulink, Stateflow and Embedded Coder -- Familiarity with Processor-in-the-Loop (PIL), Code Profile Analyzer -- Understanding of automotive software development such as V-Model lifecycle methodology. 
- - -## Resources from Arm and our partners - -- Built-in Simulink Automotive Example: [Automatic Climate Control](https://www.mathworks.com/help/simulink/slref/simulating-automatic-climate-control-systems.html) -- Built-in Simulink Automotive Example: [Tire Pressure Monitoring System (TPMS)]( https://www.mathworks.com/help/simulink/ug/wirelesss-tire-pressure-monitoring-system-with-fault-logging.html) -- Built-in Simulink Automotive Example: [Anti-Lock Braking System (ABS)]( https://www.mathworks.com/help/simulink/slref/modeling-an-anti-lock-braking-system.html) -- Define Requirements: [Requirements Toolbox™](https://www.mathworks.com/products/requirements-toolbox.html) -- Code Generation: [MathWorks Embedded Coder®](https://uk.mathworks.com/products/embedded-coder.html) -- Model-in-the-Loop Test: [Simulink Test™](https://www.mathworks.com/help/sltest/index.html?s_tid=CRUX_lftnav) -- Measure Code Coverage: [Simulink Coverage™](https://www.mathworks.com/help/slcoverage/index.html) -- Hardware Implementation: [Arm Cortex M7 (Fast Model)](https://developer.arm.com/Tools%20and%20Software/Fast%20Models), [Embedded Coder Support Package for Arm Cortex M Fast Models]( https://www.mathworks.com/hardware-support/arm-cortex-m.html) -- Conduct Execution Profiling: [Code Profile Analyzer](https://www.mathworks.com/help/ecoder/ref/codeprofileanalyzer-app.html) -- Perform Static Code Analysis: [Polyspace®](https://www.mathworks.com/products/polyspace.html) -- Documentation: [MATLAB and Simulink for Verification and Validation](https://www.mathworks.com/solutions/verification-validation.html) -- Documentation: [ARM Cortex-M Support from Embedded Coder]( https://www.mathworks.com/hardware-support/arm-cortex-m.html) -- Documentation: [Arm Fast Models](https://uk.mathworks.com/products/connections/product_detail/arm-fast-models.html) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are 
part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Quantisation-Aware-Training.md b/docs/_posts/2025-05-30-Quantisation-Aware-Training.md deleted file mode 100644 index 66cfd281..00000000 --- a/docs/_posts/2025-05-30-Quantisation-Aware-Training.md +++ /dev/null @@ -1,106 +0,0 @@ ---- -title: Quantisation-Aware-Training -description: This self-service project applies PyTorch quantization-aware training to compress and accelerate vision models for Arm-powered Android devices—enabling real-time, on-device AI while sharing the resulting lightweight models with the Hugging Face community. -subjects: -- ML -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - This open-ended project invites students to explore **Quantization-Aware Training (QAT)** with PyTorch to optimize computer vision models for **Arm-based mobile devices** (e.g., Android smartphones). 
- - The project centers on training a model using a **non-restrictively licensed dataset** and deploying it either on **Arm-powered mobile devices** (leveraging Android Neural Networks API ) - - Students will apply QAT to maintain accuracy while reducing model size and inference latency, making it suitable for real-time applications like: - - Sign language recognition for accessibility. - - Visual anomaly detection in manufacturing. - - Personal health and activity monitoring from camera feeds. - - The project encourages referencing work by contributing **optimized and quantized models for Arm platforms** on HuggingFace. The final quantized model will be **uploaded to HuggingFace** and may be submitted for listing in the [Arm on HuggingFace space](https://huggingface.co/Arm), encouraging open, community-supported contributions. - - ## Prequisites - - - **Languages**: Familiar with Python, pytorch and Java/Kotlin (if Android). - - **Frameworks**: Intermediate understanding of PyTorch - - **Tooling**: PyTorch Lightning, Android Studio - - **Hardware Options**: - - Android phone with Arm Cortex-A CPU or simulator through Android Studio. - - **Deployment Targets**: - - Android - - ## Resources from Arm and our partners - - - Documentation: [Quantization in PyTorch](https://pytorch.org/docs/stable/quantization.html) - - Blog: [Google Media Pipe](https://android-developers.googleblog.com/2024/10/bring-your-ai-model-to-android-devices.html) - - Datasets: [Hugging Face Datasets](https://huggingface.co/docs/datasets/en/index) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). 
- - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -## Description - -This open-ended project invites students to explore **Quantization-Aware Training (QAT)** with PyTorch to optimize computer vision models for **Arm-based mobile devices** (e.g., Android smartphones). - -The project centers on training a model using a **non-restrictively licensed dataset** and deploying it either on **Arm-powered mobile devices** (leveraging Android Neural Networks API ) - -Students will apply QAT to maintain accuracy while reducing model size and inference latency, making it suitable for real-time applications like: -- Sign language recognition for accessibility. -- Visual anomaly detection in manufacturing. -- Personal health and activity monitoring from camera feeds. - -The project encourages referencing work by contributing **optimized and quantized models for Arm platforms** on HuggingFace. The final quantized model will be **uploaded to HuggingFace** and may be submitted for listing in the [Arm on HuggingFace space](https://huggingface.co/Arm), encouraging open, community-supported contributions. - -## Prequisites - -- **Languages**: Familiar with Python, pytorch and Java/Kotlin (if Android). -- **Frameworks**: Intermediate understanding of PyTorch -- **Tooling**: PyTorch Lightning, Android Studio -- **Hardware Options**: - - Android phone with Arm Cortex-A CPU or simulator through Android Studio. 
-- **Deployment Targets**: - - Android - -## Resources from Arm and our partners - -- Documentation: [Quantization in PyTorch](https://pytorch.org/docs/stable/quantization.html) -- Blog: [Google Media Pipe](https://android-developers.googleblog.com/2024/10/bring-your-ai-model-to-android-devices.html) -- Datasets: [Hugging Face Datasets](https://huggingface.co/docs/datasets/en/index) -- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-R-Arm-Community-Support.md b/docs/_posts/2025-05-30-R-Arm-Community-Support.md deleted file mode 100644 index 3061387b..00000000 --- a/docs/_posts/2025-05-30-R-Arm-Community-Support.md +++ /dev/null @@ -1,140 +0,0 @@ ---- -title: R-Arm-Community-Support -description: This self-service project boosts the R ecosystem on Windows on Arm by identifying unsupported packages, upstreaming fixes, and automating builds—so data scientists can run their workflows natively on fast, efficient Arm64 laptops and desktops. 
-subjects: -- Performance and Architecture -- Migration to Arm -- Libraries -requires-team: -- No -platform: -- Laptops and Desktops -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why this is important?** - - Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, developers will expect packages to be available for WoA so that downstream applications can more easily build for WoA platforms. - - **Project summary** - - - This project aims to significantly enhance the support for running **R packages on Windows 11 for Arm64 (WoA)** by identifying and contributing bug fixes / improvements to the relevant parts of the R community (e.g., CRAN/Bioconductor packages or even R Core / Rtools etc.). The project’s goals include: - - - - **Identifying CRAN and Bioconductor packages** lacking Windows/Arm64 support. - - **Proposing and testing patches upstream** for R packages that fail to build or run on WoA. - - **Engaging with the R development community** via the Windows Special interest group, [R-SIG-windows](https://stat.ethz.ch/mailman/listinfo/r-sig-windows), [R-Package-Devel](https://stat.ethz.ch/mailman/listinfo/r-package-devel) mailing list or the informal [R Contributors Slack](https://contributor.r-project.org/slack) and following the [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) to submit high-quality patches. 
- - **Reporting new issues**, requesting comments on proposed patches, documenting process via Bugzilla and reviewing existing issues through the [bug tracker](https://bugs.r-project.org/) - - **Tracking CI coverage** for recently announced public preview of [Windows11-Arm64 GitHub-hosted runners](https://github.blog/changelog/2025-04-14-windows-arm64-hosted-runners-now-available-in-public-preview/) and potentially proposing GitHub Actions or GitLab CI templates to automate WoA builds. - - Stretch Objectives: - - - **Identifying, Analyzing and fixing compatibility issues** in base R and Rtools for the Windows/Arm64 environment. This may involve waiting for improved upstream support from GCC for Windows-AArch64. A summary and progress is [available here](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/28802842658/MinGW+GNU+Toolchain). - - The deliverables include: - - - Patches, request for comments and bug reports the highest impact packages - - A curated list of packages with proposed WoA support status - - A short technical write-up describing the contributions and challenges - - ## Prequisites - - - Intermediate understanding of the R language - - Intermediate understanding of Rtools, Git and Docker for cross-compilation. - - Basic understanding or willingness to learn Bugzilla, Windows 11 operating system and GitHub CI/CD. - - Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments). 
- - ## Resources from Arm and our partners - - - Documentation: [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) - - Documentation: [R Bugzilla](https://bugs.r-project.org/) - - Documentation: [Rtools for Windows](https://cran.r-project.org/bin/windows/Rtools/) - - Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/) - - Documentation: Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors , who are part of the Arm Developer program and the R community. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description - -**Why this is important?** - -Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, developers will expect packages to be available for WoA so that downstream applications can more easily build for WoA platforms. 
- -**Project summary** - - -This project aims to significantly enhance the support for running **R packages on Windows 11 for Arm64 (WoA)** by identifying and contributing bug fixes / improvements to the relevant parts of the R community (e.g., CRAN/Bioconductor packages or even R Core / Rtools etc.). The project’s goals include: - - -- **Identifying CRAN and Bioconductor packages** lacking Windows/Arm64 support. -- **Proposing and testing patches upstream** for R packages that fail to build or run on WoA. -- **Engaging with the R development community** via the Windows Special interest group, [R-SIG-windows](https://stat.ethz.ch/mailman/listinfo/r-sig-windows), [R-Package-Devel](https://stat.ethz.ch/mailman/listinfo/r-package-devel) mailing list or the informal [R Contributors Slack](https://contributor.r-project.org/slack) and following the [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) to submit high-quality patches. -- **Reporting new issues**, requesting comments on proposed patches, documenting process via Bugzilla and reviewing existing issues through the [bug tracker](https://bugs.r-project.org/) -- **Tracking CI coverage** for recently announced public preview of [Windows11-Arm64 GitHub-hosted runners](https://github.blog/changelog/2025-04-14-windows-arm64-hosted-runners-now-available-in-public-preview/) and potentially proposing GitHub Actions or GitLab CI templates to automate WoA builds. - -Stretch Objectives: - -- **Identifying, Analyzing and fixing compatibility issues** in base R and Rtools for the Windows/Arm64 environment. This may involve waiting for improved upstream support from GCC for Windows-AArch64. A summary and progress is [available here](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/28802842658/MinGW+GNU+Toolchain). 
- -The deliverables include: - -- Patches, request for comments and bug reports for the highest impact packages -- A curated list of packages with proposed WoA support status -- A short technical write-up describing the contributions and challenges - -## Prerequisites - -- Intermediate understanding of the R language -- Intermediate understanding of Rtools, Git and Docker for cross-compilation. -- Basic understanding or willingness to learn Bugzilla, Windows 11 operating system and GitHub CI/CD. -- Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments). - -## Resources from Arm and our partners - -- Documentation: [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) -- Documentation: [R Bugzilla](https://bugs.r-project.org/) -- Documentation: [Rtools for Windows](https://cran.r-project.org/bin/windows/Rtools/) -- Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/) -- Documentation: Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program and the R community. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Real-Time-Image-Classification.md b/docs/_posts/2025-05-30-Real-Time-Image-Classification.md deleted file mode 100644 index 19478c90..00000000 --- a/docs/_posts/2025-05-30-Real-Time-Image-Classification.md +++ /dev/null @@ -1,98 +0,0 @@ ---- -title: Real-Time-Image-Classification -description: This self-service project trains, quantizes, and CMSIS-NN-deploys a CNN to achieve real-time image classification on an Arm Cortex-M board—demonstrating low-power, edge-ready AI on microcontrollers. -subjects: -- ML -- Performance and Architecture -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - This project aims to develop a real-time image classification system using Convolutional Neural Networks (CNN) on an Arm Cortex-M microcontroller with CMSIS-NN. The main deliverables include training a CNN model on a custom dataset, quantizing the model to deploy it on resource-constrained devices, and transforming the model into a C format to compile and run on the microcontroller. The project will provide practical experience in running AI models on edge devices and optimizing AI models for efficient performance. The final output will be a functional image classification system capable of real-time processing on a microcontroller. - - - ## Prequisites - - - Languages: Python, ML framework experience (TensorFlowLite for microcontroller or Pytorch / Executorch), Embedded programming in C. 
- - Tooling: - - TensorFlow Lite - - CMSIS-NN - - Keil MDK - - Hardware: - - Arm Cortex-M based microcontroller development board and compatible camera module. - - Access to hardware suitable for training neural networks - - ## Resources from Arm and our partners - - - Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) - - Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) - - Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) - - Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description -This project aims to develop a real-time image classification system using Convolutional Neural Networks (CNN) on an Arm Cortex-M microcontroller with CMSIS-NN. The main deliverables include training a CNN model on a custom dataset, quantizing the model to deploy it on resource-constrained devices, and transforming the model into a C format to compile and run on the microcontroller. 
The project will provide practical experience in running AI models on edge devices and optimizing AI models for efficient performance. The final output will be a functional image classification system capable of real-time processing on a microcontroller. - - -## Prequisites - -- Languages: Python, ML framework experience (TensorFlowLite for microcontroller or Pytorch / Executorch), Embedded programming in C. -- Tooling: - - TensorFlow Lite - - CMSIS-NN - - Keil MDK -- Hardware: - - Arm Cortex-M based microcontroller development board and compatible camera module. - - Access to hardware suitable for training neural networks - -## Resources from Arm and our partners - -- Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) -- Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) -- Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) -- Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md b/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md deleted file mode 100644 index f356ec62..00000000 --- a/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md +++ /dev/null @@ -1,144 +0,0 @@ ---- -title: Responsible-AI-and-Yellow-Teaming -description: This self-service project equips teams with a YellowTeamGPT workflow that probes Arm-based AI products for unintended impacts—turning responsible-AI stress-testing into a core step of the development cycle. -subjects: -- ML -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - - **Why this is important?** - - AI products are becoming more capable by the day. But unless we think carefully about how we build AI systems, we risk amplifying harm as fast as we’re scaling performance. Even AI products that seem successful in narrow metrics like `number of users` and `engagement` can lead to a degradation of user trust in your company, reputational risk, and drive negative societal outcomes. A more systematic product development approach is necessary for this next generation of tech to ensure we get the benefits of AI without the downsides. - - **Project summary** - - This project introduces "Yellow Teaming", a structured methodology for stress-testing AI products by exploring the full spectrum of consequences when products succeed, not just fail. Students will create a YellowTeamGPT to analyze their Arm-based product concept and apply the learnings to make their product better. 
This exercise is an excellent way for software developers, product managers, and designers to elevate their products through thoughtful design choices above a crowded competitive landscape. - - The assistant will be integrated into your product development workflow (e.g. product brainstorming, feature planning reviews, coding sessions) to aid software teams in surfacing unintended effects of new product features on your company, your users, and society. Participants can implement their YellowTeamGPT using any LLM, from a private Llama3.1-8B model on an AWS instance (tutorial linked below) to a public ChatGPT/Claude/other chatbot. Participants can also Yellow Team without an LLM by applying the methodology themselves and documenting their analysis. Analysis can be documented as product design documents, sprint retrospectives, Git-based code reviews...anything that shows the results of their thoughtful design practices. - - Key Objectives of Your Project - - Collect Real-World Applications: Gather detailed accounts of AI projects developed using Yellow Teaming principles on Arm-based systems. - - Showcase Responsible AI Practices: Highlight how developers anticipate and address potential societal and ethical impacts of their AI solutions. - - Promote Arm-Based AI Development: Demonstrate the capabilities and advantages of deploying AI applications on Arm architectures, such as AWS Graviton processors or smartphones. - - Yellow Teaming Implementation: A detailed account of how Yellow Teaming was applied, including the tools/prompts used to facilitate analysis, identification of unintended consequences, strategies to mitigate negative product impacts, and/or new net-positive features ideated. - - - ## Prequisites - - If deploying a private Llama model -> - - **Hardware**: - - Access to an Arm-based cloud instance, for example Arm-based Graviton4 processors. 
- - **Software**: - - PyTorch and Hugging Face account - - `torchchat` repo and dependencies - - Hugging Face CLI for LLM download - - Git, Python 3.10+, and various common build essentials (e.g., `make`, `g++`) - - **Skills**: - - Proficiency in Python and PyTorch - - [Hugging Face account](https://huggingface.co/) - - Understanding of LLMs and prompting techniques - - If using a public LLM -> - - **Hardware**: - - None needed - - **Software**: - - Access to a public LLM - - **Skills**: - - Understanding of LLMs and prompting techniques - - ## Resources from Arm and our partners - - - External Course: [Mitigating Harmful Consequences course module by the Center for Humane Technology](https://www.humanetech.com/course) - - Blog: [Build Responsible AI products with your own Yellow Teaming LLM](https://pytorch.org/blog/build-responsible-ai-products-with-your-own-yellow-teaming-llm/) - - Learning Paths: [AI Learning Paths](https://learn.arm.com/tag/ml) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -## Description - - -**Why this is important?** - -AI products are becoming more capable by the day. 
But unless we think carefully about how we build AI systems, we risk amplifying harm as fast as we’re scaling performance. Even AI products that seem successful in narrow metrics like `number of users` and `engagement` can lead to a degradation of user trust in your company, reputational risk, and drive negative societal outcomes. A more systematic product development approach is necessary for this next generation of tech to ensure we get the benefits of AI without the downsides. - -**Project summary** - -This project introduces "Yellow Teaming", a structured methodology for stress-testing AI products by exploring the full spectrum of consequences when products succeed, not just fail. Students will create a YellowTeamGPT to analyze their Arm-based product concept and apply the learnings to make their product better. This exercise is an excellent way for software developers, product managers, and designers to elevate their products through thoughtful design choices above a crowded competitive landscape. - -The assistant will be integrated into your product development workflow (e.g. product brainstorming, feature planning reviews, coding sessions) to aid software teams in surfacing unintended effects of new product features on your company, your users, and society. Participants can implement their YellowTeamGPT using any LLM, from a private Llama3.1-8B model on an AWS instance (tutorial linked below) to a public ChatGPT/Claude/other chatbot. Participants can also Yellow Team without an LLM by applying the methodology themselves and documenting their analysis. Analysis can be documented as product design documents, sprint retrospectives, Git-based code reviews...anything that shows the results of their thoughtful design practices. - -Key Objectives of Your Project -- Collect Real-World Applications: Gather detailed accounts of AI projects developed using Yellow Teaming principles on Arm-based systems. 
-- Showcase Responsible AI Practices: Highlight how developers anticipate and address potential societal and ethical impacts of their AI solutions. -- Promote Arm-Based AI Development: Demonstrate the capabilities and advantages of deploying AI applications on Arm architectures, such as AWS Graviton processors or smartphones. -- Yellow Teaming Implementation: A detailed account of how Yellow Teaming was applied, including the tools/prompts used to facilitate analysis, identification of unintended consequences, strategies to mitigate negative product impacts, and/or new net-positive features ideated. - - -## Prequisites - -If deploying a private Llama model -> -- **Hardware**: - - Access to an Arm-based cloud instance, for example Arm-based Graviton4 processors. -- **Software**: - - PyTorch and Hugging Face account - - `torchchat` repo and dependencies - - Hugging Face CLI for LLM download - - Git, Python 3.10+, and various common build essentials (e.g., `make`, `g++`) -- **Skills**: - - Proficiency in Python and PyTorch - - [Hugging Face account](https://huggingface.co/) - - Understanding of LLMs and prompting techniques - -If using a public LLM -> -- **Hardware**: - - None needed -- **Software**: - - Access to a public LLM -- **Skills**: - - Understanding of LLMs and prompting techniques - -## Resources from Arm and our partners - -- External Course: [Mitigating Harmful Consequences course module by the Center for Humane Technology](https://www.humanetech.com/course) -- Blog: [Build Responsible AI products with your own Yellow Teaming LLM](https://pytorch.org/blog/build-responsible-ai-products-with-your-own-yellow-teaming-llm/) -- Learning Paths: [AI Learning Paths](https://learn.arm.com/tag/ml) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md b/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md deleted file mode 100644 index 1f910af8..00000000 --- a/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md +++ /dev/null @@ -1,85 +0,0 @@ ---- -title: Sentiment-Analysis-Dashboard -description: This self-service project builds a web-scraping, LLM-powered dashboard that tracks and visualizes sentiment trends across semiconductor-industry news, giving stakeholders a real-time pulse on market mood and emerging themes. -subjects: -- ML -- Web -- Databases -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - This project aims to develop a sentiment analysis dashboard for keywords related to the semiconductor industry. The main deliverable is a web scraping script that gathers text data from various semiconductor news sites. 
Example sites are [SemiconductorEngineering.com](https://semiengineering.com/), [IEEE Spectrum](https://spectrum.ieee.org/), [EETimes](https://www.eetimes.com/tag/semiconductors/), [SemiconductorDigest](https://www.semiconductor-digest.com/), [SemiconductorToday](https://semiconductor-today.com/), [Financial Times - Semiconductors](https://www.ft.com/semiconductors), [Bezinga Semiconductors](https://www.benzinga.com/topic/semiconductors). - - This data will then be processed through a sentiment analysis LLM (Large Language Model) to determine the sentiment of the content and how it varies over time. The project will provide practical experience in web scraping, data processing, databases and using LLMs for sentiment analysis. The final output will be a functional dashboard that displays the sentiment analysis results in an easy-to-understand format. - - ## Prequisites - - - Languages: Intermediate understanding of Python - - Hardware: Access to a computer with internet connectivity and access to cloud instances - - ## Resources from Arm and our partners - - You are free to choose your own implementation details. The resouces below are examples to get started. - - - External Documentation: [BeautifulSoup](https://pypi.org/project/beautifulsoup4/) - - Learning Paths: [Deploy MariaDB](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -## Description -This project aims to develop a sentiment analysis dashboard for keywords related to the semiconductor industry. The main deliverable is a web scraping script that gathers text data from various semiconductor news sites. Example sites are [SemiconductorEngineering.com](https://semiengineering.com/), [IEEE Spectrum](https://spectrum.ieee.org/), [EETimes](https://www.eetimes.com/tag/semiconductors/), [SemiconductorDigest](https://www.semiconductor-digest.com/), [SemiconductorToday](https://semiconductor-today.com/), [Financial Times - Semiconductors](https://www.ft.com/semiconductors), [Bezinga Semiconductors](https://www.benzinga.com/topic/semiconductors). - -This data will then be processed through a sentiment analysis LLM (Large Language Model) to determine the sentiment of the content and how it varies over time. The project will provide practical experience in web scraping, data processing, databases and using LLMs for sentiment analysis. The final output will be a functional dashboard that displays the sentiment analysis results in an easy-to-understand format. - -## Prequisites - -- Languages: Intermediate understanding of Python -- Hardware: Access to a computer with internet connectivity and access to cloud instances - -## Resources from Arm and our partners - -You are free to choose your own implementation details. The resouces below are examples to get started. - -- External Documentation: [BeautifulSoup](https://pypi.org/project/beautifulsoup4/) -- Learning Paths: [Deploy MariaDB](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/) - - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Smart-Voice-Assistant.md b/docs/_posts/2025-05-30-Smart-Voice-Assistant.md deleted file mode 100644 index a3394f30..00000000 --- a/docs/_posts/2025-05-30-Smart-Voice-Assistant.md +++ /dev/null @@ -1,93 +0,0 @@ ---- -title: Smart-Voice-Assistant -description: This project trains and deploys a TinyML keyword-spotting model on an Arm Cortex-M55/U55 board to create a low-power voice assistant that recognizes spoken commands and quantifies its accuracy, latency, and energy use. -subjects: -- ML -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - This project aims to develop a simple voice assistant that can recognize spoken commands such as “turn on the light,” “play music,” and other similar tasks. The voice assistant will be able to control peripheral devices accordingly. The main objective is to implement your design on a low-power microcontroller Cortex-M55/U55 to create low-level machine learning applications. You should look to access metrics such as the accuracy, power and computation time. 
Please refer our [Machine Learning keyword spotting example](https://github.com/Arm-Examples/mlek-cmsis-pack-examples) as a reference. - - The deliverables include a functional voice assistant capable of understanding and executing basic commands, along with documentation detailing the development process and the performance of the system. - - ## Prequisites - - - Languages: Python, C++, Embedded C - - Tooling: TensorFlow Lite for Microcontrollers, Keil MDK - - Hardware: Cortex-M55/U55 development board (or Corstone Virtual Platform), microphone, peripheral devices (e.g., lights, speakers) - - - ## Resources from Arm and our partners - - - Learning paths: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) - - Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) - - Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) - - Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) - - Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description -This project aims to develop a simple voice assistant that can recognize spoken commands such as “turn on the light,” “play music,” and other similar tasks. The voice assistant will be able to control peripheral devices accordingly. The main objective is to implement your design on a low-power microcontroller Cortex-M55/U55 to create low-level machine learning applications. You should look to access metrics such as the accuracy, power and computation time. Please refer our [Machine Learning keyword spotting example](https://github.com/Arm-Examples/mlek-cmsis-pack-examples) as a reference. - -The deliverables include a functional voice assistant capable of understanding and executing basic commands, along with documentation detailing the development process and the performance of the system. - -## Prequisites - -- Languages: Python, C++, Embedded C -- Tooling: TensorFlow Lite for Microcontrollers, Keil MDK -- Hardware: Cortex-M55/U55 development board (or Corstone Virtual Platform), microphone, peripheral devices (e.g., lights, speakers) - - -## Resources from Arm and our partners - -- Learning paths: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) -- Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) -- Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) -- Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) -- Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm 
Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md b/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md deleted file mode 100644 index 01f66357..00000000 --- a/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md +++ /dev/null @@ -1,126 +0,0 @@ ---- -title: SpecINT2017-benchmarking-on-Arm64 -description: This self-service project profiles SPEC CPU2017 on Arm64 servers—using GCC, Clang, and Arm Compiler with top-down analysis—to reveal how compiler choices and Arm micro-architectural features impact execution time, energy efficiency, and performance bottlenecks. -subjects: -- Performance and Architecture -- Migration to Arm -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ### Description - - **Why this is important?** - - SPEC is an industry standard for assessing the performance of both single-threaded and multi-threaded applications across various data types and compilers. This synthetic workload accurately represents real-world tasks and serves as a common metric for evaluating platform choices. 
Therefore, it is essential to comprehend the inner workings of these benchmarks to identify which microarchitectural features can enhance end-user applications. - - **Project Summary** - - This project aims to replicate the characterisation study from "SPEC CPU2017: Performance, Event, and Energy Characterization on the Core i7-8700K" on an Arm64 platform (e.g., Ampere Altra, AWS Graviton) using different compilers and performance profiling tools. The study will analyze how compiler optimizations and architectural features affect execution time, energy efficiency, and instruction throughput on Arm-based server processors. Deliverables include a comprehensive performance analysis report, reproducible benchmarking scripts, and a dataset comparing performance across different configurations. The report should locate microarchitectural bottlenecks using the [top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology), compiler performance and recommendations on how to improve performance. - - ## Prequisites - - Hardware: Access to Arm64-based server (Ampere Altra, AWS Graviton, Raspberry Pi for preliminary tests) - - Software: Familiarity with performance engineering and a OOP with a language such as C++. 
- - Compilers: GCC, LLVM/Clang, Arm Compiler for Linux - - Profiling Tools: perf, Arm Performance Libraries - - Workloads: SPEC CPU2017 (academic license required), custom workloads - - ## Resources from Arm and our partners - - - Research Article: [Characterisation Paper on x86](https://research.spec.org/icpe_proceedings/2019/proceedings/p111.pdf) - - - Whitepaper: [Arm Top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology) - - - Install Guide:[Install Perf for Linux on Arm](https://learn.arm.com/install-guides/perf/) - - - Documentation: [Arm Performance Counters](https://developer.arm.com/documentation/ddi0379/a/Introduction/Performance-counters) - - - Documentation: [SPEC CPU2017 ](https://www.spec.org/cpu2017/results/) - - - Documentation: [GNU compilers](https://gcc.gnu.org/) - - - Software Download: [Arm compiler for Linux](https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -### Description - -**Why this is important?** - -SPEC is an industry standard for assessing the performance of both single-threaded and multi-threaded applications across various data types and compilers. 
This synthetic workload accurately represents real-world tasks and serves as a common metric for evaluating platform choices. Therefore, it is essential to comprehend the inner workings of these benchmarks to identify which microarchitectural features can enhance end-user applications. - -**Project Summary** - -This project aims to replicate the characterisation study from "SPEC CPU2017: Performance, Event, and Energy Characterization on the Core i7-8700K" on an Arm64 platform (e.g., Ampere Altra, AWS Graviton) using different compilers and performance profiling tools. The study will analyze how compiler optimizations and architectural features affect execution time, energy efficiency, and instruction throughput on Arm-based server processors. Deliverables include a comprehensive performance analysis report, reproducible benchmarking scripts, and a dataset comparing performance across different configurations. The report should locate microarchitectural bottlenecks using the [top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology), compiler performance and recommendations on how to improve performance. - -## Prequisites - -Hardware: Access to Arm64-based server (Ampere Altra, AWS Graviton, Raspberry Pi for preliminary tests) - -Software: Familiarity with performance engineering and a OOP with a language such as C++. 
- -Compilers: GCC, LLVM/Clang, Arm Compiler for Linux - -Profiling Tools: perf, Arm Performance Libraries - -Workloads: SPEC CPU2017 (academic license required), custom workloads - -## Resources from Arm and our partners - -- Research Article: [Characterisation Paper on x86](https://research.spec.org/icpe_proceedings/2019/proceedings/p111.pdf) - -- Whitepaper: [Arm Top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology) - -- Install Guide:[Install Perf for Linux on Arm](https://learn.arm.com/install-guides/perf/) - -- Documentation: [Arm Performance Counters](https://developer.arm.com/documentation/ddi0379/a/Introduction/Performance-counters) - -- Documentation: [SPEC CPU2017 ](https://www.spec.org/cpu2017/results/) - -- Documentation: [GNU compilers](https://gcc.gnu.org/) - -- Software Download: [Arm compiler for Linux](https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-Write-A-Learning-Path.md b/docs/_posts/2025-05-30-Write-A-Learning-Path.md deleted file mode 100644 index b372f0b5..00000000 --- a/docs/_posts/2025-05-30-Write-A-Learning-Path.md +++ /dev/null @@ -1,80 +0,0 @@ ---- -title: Write-A-Learning-Path -description: This project lets students turn their Arm expertise into a publish-ready Learning Path—creating a structured, hands-on tutorial that guides others through a complete, hardware-friendly build and showcases the author’s teaching skills. -subjects: -- Libraries -- Web -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- Automotive -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - This exciting opportunity invites students to create and submit a comprehensive learning path focused on any aspect of Arm technology. Participants will have the chance to showcase their expertise, contribute to the academic community, and potentially publish their work for a wider audience. Learning paths are structured tutorials that guide learners through a series of topics and skills. For example, a project could involve developing a learning path on Arm-based embedded systems, culminating in a practical demonstration of a working prototype, such as a simple temperature monitoring system using a basic microcontroller and a few sensors. This project can be easily recreated with minimal hardware and software access. We look forward to seeing your innovative and insightful submissions! 
- - ## Prequisites - - - Computer with Internet Connectivity - - ## Resources from Arm and our partners - - - Documentation: [How to create a learning path](https://learn.arm.com/learning-paths/cross-platform/_example-learning-path/) - - Documentation: (https://github.com/ArmDeveloperEcosystem/arm-learning-paths/discussions/categories/ideas-for-new-learning-paths) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description -This exciting opportunity invites students to create and submit a comprehensive learning path focused on any aspect of Arm technology. Participants will have the chance to showcase their expertise, contribute to the academic community, and potentially publish their work for a wider audience. Learning paths are structured tutorials that guide learners through a series of topics and skills. For example, a project could involve developing a learning path on Arm-based embedded systems, culminating in a practical demonstration of a working prototype, such as a simple temperature monitoring system using a basic microcontroller and a few sensors. This project can be easily recreated with minimal hardware and software access. We look forward to seeing your innovative and insightful submissions! 
- -## Prequisites - -- Computer with Internet Connectivity - -## Resources from Arm and our partners - -- Documentation: [How to create a learning path](https://learn.arm.com/learning-paths/cross-platform/_example-learning-path/) -- Documentation: (https://github.com/ArmDeveloperEcosystem/arm-learning-paths/discussions/categories/ideas-for-new-learning-paths) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-ai-agents.md b/docs/_posts/2025-05-30-ai-agents.md deleted file mode 100644 index 26be05ed..00000000 --- a/docs/_posts/2025-05-30-ai-agents.md +++ /dev/null @@ -1,113 +0,0 @@ ---- -title: AI-Powered Workflow Agent in a Sandboxed Environment -description: This self-service project builds a sandboxed AI agent on Arm hardware that harnesses appropriately sized LLMs to safely automate complex workflows—from DevOps pipelines to e-commerce tasks—demonstrating secure, efficient automation on accessible Arm platforms. 
-subjects: -- ML -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -badges: trending -layout: article -sidebar: - nav: projects -full_description: |- - ### Description - - **Why this is important?** - - AI Agents enhance large language models (LLMs) by performing user-driven actions, enabling various commercial applications. This is a nascent domain will emerging frameworks such as the model context protocol (MCP) leading to commercial products and services. The Arm architecture, from microcontrollers to servers, will be used to carry out agentic functions and Arm has many initatives to support the AI future. See [our website for more details](https://www.arm.com/markets/artificial-intelligence). - - **Project Summary** - - Participants must develop an AI-powered agent that automates repetitive and complex workflow tasks in a specific domain, such as software development, e-commerice, or DevOps. The foundational model can be a suitable model of your choice (e.g., [OpenAI API](https://openai.com/api/)) but you must consider the appropriate model for cost, reliability and accessibility. Additionally, you are free to choose the tools for agent functionality, such as [LLama-cpp-agent](https://github.com/Maximilian-Winter/llama-cpp-agent). One stipulatation, is that the LLM and/or agent must run on an Arm-based system, such as a Google Pixel phone or Arm-based server. - - The AI agent will be deployed in a sandboxed environment to ensure safety and prevent unintended consequences, including prompt guardrails - - ## Prerequisites - - - Intermediate understanding in an OOP language such as Python (for front-end, if needed). - - Familiarity using Databases such as PostgreSQL, MongoDB, VectorDB. 
- - Access to a LLM (e.g., through an API or on-device LLM) - - Optional API access to target workflow tools such as Jira, Jenkins etc. - - - ## Resources from Arm and our partners - - - Learning path: [Deploy and MCP Server on a Raspberry Pi5 for AI Agent Interaction](https://learn.arm.com/learning-paths/cross-platform/mcp-ai-agent/) - - - Learning path: [Deploy an AI Agent on Arm with llama.cpp and llama-cpp-agent](https://learn.arm.com/learning-paths/servers-and-cloud-computing/ai-agent-on-cpu/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. - - ### Previous Submissions - 1. [AI to Solve Maths Example Sheets at University of Cambridge. (Finley Stirk, Eliyahu Gluschove-Koppel and Ronak De)](https://github.com/egkoppel/example-papers) - - 2. [AI that interprets user requests, generates circuit descriptions, creates LTSpice ASC code, and iteratively refines circuit designs using a combination of GPT-based language models, a vision analysis module, and LTSpice simulation. (Gijeong Lee, Bill Leoutsakos)](https://github.com/BillLeoutsakosvl346/ElectroNinjaRefined) - - - 3. 
[AI agent to track real-time student engagement and exam performance (Jasper Wang, Sritej Tummuru, Talha Javed)](https://github.com/JasperWANG-911/AI_Agent) ---- -### Description - -**Why this is important?** - -AI Agents enhance large language models (LLMs) by performing user-driven actions, enabling various commercial applications. This is a nascent domain will emerging frameworks such as the model context protocol (MCP) leading to commercial products and services. The Arm architecture, from microcontrollers to servers, will be used to carry out agentic functions and Arm has many initatives to support the AI future. See [our website for more details](https://www.arm.com/markets/artificial-intelligence). - -**Project Summary** - -Participants must develop an AI-powered agent that automates repetitive and complex workflow tasks in a specific domain, such as software development, e-commerice, or DevOps. The foundational model can be a suitable model of your choice (e.g., [OpenAI API](https://openai.com/api/)) but you must consider the appropriate model for cost, reliability and accessibility. Additionally, you are free to choose the tools for agent functionality, such as [LLama-cpp-agent](https://github.com/Maximilian-Winter/llama-cpp-agent). One stipulatation, is that the LLM and/or agent must run on an Arm-based system, such as a Google Pixel phone or Arm-based server. - -The AI agent will be deployed in a sandboxed environment to ensure safety and prevent unintended consequences, including prompt guardrails - -## Prerequisites - -- Intermediate understanding in an OOP language such as Python (for front-end, if needed). -- Familiarity using Databases such as PostgreSQL, MongoDB, VectorDB. -- Access to a LLM (e.g., through an API or on-device LLM) -- Optional API access to target workflow tools such as Jira, Jenkins etc. 
- - -## Resources from Arm and our partners - -- Learning path: [Deploy and MCP Server on a Raspberry Pi5 for AI Agent Interaction](https://learn.arm.com/learning-paths/cross-platform/mcp-ai-agent/) - -- Learning path: [Deploy an AI Agent on Arm with llama.cpp and llama-cpp-agent](https://learn.arm.com/learning-paths/servers-and-cloud-computing/ai-agent-on-cpu/) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. - -### Previous Submissions -1. [AI to Solve Maths Example Sheets at University of Cambridge. (Finley Stirk, Eliyahu Gluschove-Koppel and Ronak De)](https://github.com/egkoppel/example-papers) - -2. [AI that interprets user requests, generates circuit descriptions, creates LTSpice ASC code, and iteratively refines circuit designs using a combination of GPT-based language models, a vision analysis module, and LTSpice simulation. (Gijeong Lee, Bill Leoutsakos)](https://github.com/BillLeoutsakosvl346/ElectroNinjaRefined) - - -3. 
[AI agent to track real-time student engagement and exam performance (Jasper Wang, Sritej Tummuru, Talha Javed)](https://github.com/JasperWANG-911/AI_Agent) \ No newline at end of file diff --git a/docs/_posts/2025-05-30-ai-powered-porting-tool.md b/docs/_posts/2025-05-30-ai-powered-porting-tool.md deleted file mode 100644 index 4c6456f3..00000000 --- a/docs/_posts/2025-05-30-ai-powered-porting-tool.md +++ /dev/null @@ -1,123 +0,0 @@ ---- -title: AI-Powered Package Porting Tool for the Arm Architectures -description: This self-service project creates an AI-driven porting engine that analyzes package dependencies, auto-generates fixes, and submits pull requests—accelerating native macOS and Windows-on-Arm support for bioinformatics and R software so researchers can run demanding workflows directly on modern Arm devices. -subjects: -- CI-CD -- ML -- Migration to Arm -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - **Why this is important?** - - Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). 
While many leading Bioconda packages now support Linux/Arm, there remains a gap in native macOS and Windows on Arm support, as numerous packages default to emulated x86 environments. Additionally, the R community faces challenges with Windows-on-Arm support for community-created packages, with many unable to build due to x86-specific code issues. - - **Project Summary** - - This project challenges you to build an intelligent automation tool for porting software packages — for use in domains such as [bioinformatic pipelines with Nextflow](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/Bioinformatic-Pipeline-Analysis.md) or [statistics with R](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/R-Arm-Community-Support.md). - - Given the large number of community packages, applying manual patches is not only time-consuming but also inefficient, as many involve similar, repetitive adjustments—highlighting the need for a scalable, automated solution. - The goal is to build a sophisticated system (beyond simple shell scripts) that uses dependency graph analysis, machine learning, to: - - - Identify unported packages - - Trace recursive dependency issues - - Recommend or auto-generate build recipes and steps - - Evaluate build success and reattempt intelligently - - Generate pull requests when confident of a fix. - - For complex packages, offer guidance to developers on how to port them—for example, by suggesting tools like SSE2NEON for translating x86 SSE intrinsics. - - Be extensible to work with various packaging systems and languages - - This project is a blend of automation, machine learning, and systems programming. The outcome could directly contribute to open source ecosystems and help bring cutting-edge bioinformatics tools to wider hardware audiences. - - ## Prerequisites - - - Access to Apple Silicon or Windows on Arm machine. 
- - Familiarity with Python, Bash and Nextflow - - Familiar with genomics/bioinformatics or statistics with the R language. - - Experience or willing to learn nf-core pipelines, Conda, BioConda and Docker/Singularity. - - - ## Resources from Arm and our partners - - - External Resource: [Example Porting Script for Bioconda](https://github.com/dslarm/bioconda-contrib-notes/tree/main), [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) and [Bioconda package repository](https://bioconda.github.io/) - - Documentation: [nf-core documentation](https://nf-co.re/docs/) - - External Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/), Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages - - Dataset: Example [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -## Description - -**Why this is important?** - -Bioconda is a specialized package repository for bioinformatics and genomics. 
Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). While many leading Bioconda packages now support Linux/Arm, there remains a gap in native macOS and Windows on Arm support, as numerous packages default to emulated x86 environments. Additionally, the R community faces challenges with Windows-on-Arm support for community-created packages, with many unable to build due to x86-specific code issues. - -**Project Summary** - -This project challenges you to build an intelligent automation tool for porting software packages — for use in domains such as [bioinformatic pipelines with Nextflow](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/Bioinformatic-Pipeline-Analysis.md) or [statistics with R](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/R-Arm-Community-Support.md). - -Given the large number of community packages, applying manual patches is not only time-consuming but also inefficient, as many involve similar, repetitive adjustments—highlighting the need for a scalable, automated solution. -The goal is to build a sophisticated system (beyond simple shell scripts) that uses dependency graph analysis, machine learning, to: - -- Identify unported packages -- Trace recursive dependency issues -- Recommend or auto-generate build recipes and steps -- Evaluate build success and reattempt intelligently -- Generate pull requests when confident of a fix. 
-- For complex packages, offer guidance to developers on how to port them—for example, by suggesting tools like SSE2NEON for translating x86 SSE intrinsics. -- Be extensible to work with various packaging systems and languages - -This project is a blend of automation, machine learning, and systems programming. The outcome could directly contribute to open source ecosystems and help bring cutting-edge bioinformatics tools to wider hardware audiences. - -## Prerequisites - -- Access to Apple Silicon or Windows on Arm machine. -- Familiarity with Python, Bash and Nextflow -- Familiar with genomics/bioinformatics or statistics with the R language. -- Experience or willing to learn nf-core pipelines, Conda, BioConda and Docker/Singularity. - - -## Resources from Arm and our partners - -- External Resource: [Example Porting Script for Bioconda](https://github.com/dslarm/bioconda-contrib-notes/tree/main), [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) and [Bioconda package repository](https://bioconda.github.io/) -- Documentation: [nf-core documentation](https://nf-co.re/docs/) -- External Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/), Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages -- Dataset: Example [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-amba-simulator-framework.md b/docs/_posts/2025-05-30-amba-simulator-framework.md deleted file mode 100644 index 88cd6c10..00000000 --- a/docs/_posts/2025-05-30-amba-simulator-framework.md +++ /dev/null @@ -1,99 +0,0 @@ ---- -title: AMBA Infrastructure Design and Simulation Framework -description: This self-guided hardware project has you implement, simulate, and FPGA-prototype a Verilog AMBA bus—from simple APB to advanced CHI—sharpening hands-on expertise with Arm’s interconnect backbone and yielding a reusable reference design for future embedded systems. -subjects: -- Virtual Hardware -- Performance and Architecture -requires-team: -- No -platform: -- Embedded and Microcontrollers -sw-hw: -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Audience - Electronic Engineering - - ## Description - This project aims to develop a reference design of AMBA (Advanced Microcontroller Bus Architecture) infrastructure. You are free to choose which AMBA protocol and version of the interconnect standard to implement with more simple specifications e.g., APB, being easier to create than coherent protocols such as AMBA CHI. - - The main deliverables include the Verilog design of the AMBA infrastructure, a Verilog test bench for testing the design, an RTL (Register Transfer Level) simulation flow to verify the functionality, and an FPGA prototyping platform to demonstrate the design in a real-world environment. 
The project will provide a comprehensive understanding of AMBA protocols and their implementation, making it an excellent learning opportunity for students interested in digital design and hardware description languages. - - ## Prequisites - - - Intermediate understanding of Verilog, SystemVerilog or other hardware description languages (HDL). - - Access and basic understanding of ModelSim, Quartus and Vivado - - Access to a suitable FPGA development board (e.g., Xilinx or Altera), simulation tools - - ## Resources from Arm and our partners - - - - Video: [Introductory Video to AMBA](https://www.youtube.com/watch?v=zayyWwSxyW4) - - Documentation: [AMBA Interconnect Specifications](https://www.arm.com/architecture/system-architectures/amba/amba-specifications) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Previous Submissions - - Similar projects: - - https://github.com/kumarraj5364/AMBA-APB-PROTOCOL - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - -## Audience -Electronic Engineering - -## Description -This project aims to develop a reference design of AMBA (Advanced Microcontroller Bus Architecture) infrastructure. 
You are free to choose which AMBA protocol and version of the interconnect standard to implement with more simple specifications e.g., APB, being easier to create than coherent protocols such as AMBA CHI. - -The main deliverables include the Verilog design of the AMBA infrastructure, a Verilog test bench for testing the design, an RTL (Register Transfer Level) simulation flow to verify the functionality, and an FPGA prototyping platform to demonstrate the design in a real-world environment. The project will provide a comprehensive understanding of AMBA protocols and their implementation, making it an excellent learning opportunity for students interested in digital design and hardware description languages. - -## Prequisites - -- Intermediate understanding of Verilog, SystemVerilog or other hardware description languages (HDL). -- Access and basic understanding of ModelSim, Quartus and Vivado -- Access to a suitable FPGA development board (e.g., Xilinx or Altera), simulation tools - -## Resources from Arm and our partners - - -- Video: [Introductory Video to AMBA](https://www.youtube.com/watch?v=zayyWwSxyW4) -- Documentation: [AMBA Interconnect Specifications](https://www.arm.com/architecture/system-architectures/amba/amba-specifications) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Previous Submissions - -Similar projects: - - https://github.com/kumarraj5364/AMBA-APB-PROTOCOL - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-architecture-insight-dashboard.md b/docs/_posts/2025-05-30-architecture-insight-dashboard.md deleted file mode 100644 index c9d884ba..00000000 --- a/docs/_posts/2025-05-30-architecture-insight-dashboard.md +++ /dev/null @@ -1,116 +0,0 @@ ---- -title: Develop an Arm Architecture Insight Dashboard -description: This self-service project develops a data-rich dashboard that visualizes the popularity of Arm CPU/OS combinations and pinpoints software-stack support for specific extensions—giving developers an instant, validated view of where their workloads will run best. -subjects: -- Performance and Architecture -- Web -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ### Description - - **Why this is important?** - - Developers often face challenges in selecting the appropriate platform for their software. With numerous smartphones and cloud instances available, gauging consumer popularity and availability can be difficult, and identifying software stack dependencies can be time-consuming. As Arm anticipates an increase in Arm-based products in the coming years, this situation is likely to become even more complex, requiring the need for a single, validated solution. 
- - **Project Summary** - - This project aims to develop a comprehensive dashboard that lets a developer know what proportion of devices support a specific Arm CPU extension, similar to [“Can I use”](https://caniuse.com/) for web development and any software compatibility issues. The functional requirements for the Architecture Insights dashboard: - - - Popularity of Arm architectures and Operating System combinations over time - - Searchable index of software, libraries and tools that have been optimised for a specific architecture. For example, "Does the video processing software, FFMPEG, support acceleration for SVE2 with Windows 11?" - - - Students will gain hands-on experience with data visualization, statistical analysis, web development, and market analysis, providing valuable insights into the Arm ecosystem. - - ## Prequisites - - You are free to explore your own implementation. The skills below are examples. - - - Intemediate understanding of an OOP language such as Python or JavaScript - - Access to a computer with internet connectivity - - - ## Resources from Arm and our partners - - - Website: [Arm Software Ecosystem Dashboard](https://www.arm.com/developer-hub/ecosystem-dashboard) - - Website: [Windows on Arm Support Wiki page](https://linaro.atlassian.net/wiki/spaces/WOAR/overview) - - Website: ["Can I Use?" dashboard](https://caniuse.com/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -### Description - -**Why this is important?** - -Developers often face challenges in selecting the appropriate platform for their software. With numerous smartphones and cloud instances available, gauging consumer popularity and availability can be difficult, and identifying software stack dependencies can be time-consuming. As Arm anticipates an increase in Arm-based products in the coming years, this situation is likely to become even more complex, requiring the need for a single, validated solution. - -**Project Summary** - -This project aims to develop a comprehensive dashboard that lets a developer know what proportion of devices support a specific Arm CPU extension, similar to [“Can I use”](https://caniuse.com/) for web development and any software compatibility issues. The functional requirements for the Architecture Insights dashboard: - -- Popularity of Arm architectures and Operating System combinations over time -- Searchable index of software, libraries and tools that have been optimised for a specific architecture. For example, "Does the video processing software, FFMPEG, support acceleration for SVE2 with Windows 11?" - - -Students will gain hands-on experience with data visualization, statistical analysis, web development, and market analysis, providing valuable insights into the Arm ecosystem. - -## Prequisites - -You are free to explore your own implementation. The skills below are examples. 
- -- Intemediate understanding of an OOP language such as Python or JavaScript -- Access to a computer with internet connectivity - - -## Resources from Arm and our partners - -- Website: [Arm Software Ecosystem Dashboard](https://www.arm.com/developer-hub/ecosystem-dashboard) -- Website: [Windows on Arm Support Wiki page](https://linaro.atlassian.net/wiki/spaces/WOAR/overview) -- Website: ["Can I Use?" dashboard](https://caniuse.com/) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-arduino-ide-windows-on-arm.md b/docs/_posts/2025-05-30-arduino-ide-windows-on-arm.md deleted file mode 100644 index c342deaf..00000000 --- a/docs/_posts/2025-05-30-arduino-ide-windows-on-arm.md +++ /dev/null @@ -1,125 +0,0 @@ ---- -title: Porting and Optimizing Arduino IDE for Windows on Arm -description: This self-service project ports and optimizes the Arduino IDE—patching its lzma-native dependency—to run natively and efficiently on Windows on Arm, giving developers hands-on experience with cross-platform builds, Arm64 performance tuning, and upstream open-source contributions. 
-subjects: -- Performance and Architecture -- Migration to Arm -- Libraries -requires-team: -- No -platform: -- Laptops and Desktops -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -badges: -- trending -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why this is important?** - - Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, consumers will expect both performance and battery efficiency across all WoA applications, such as the ArduinoIDE. - - **Project summary** - - This project focuses on **porting and optimising the Arduino IDE**—an essential open-source platform for embedded development to run natively and efficiently on Windows on Arm platforms. In addition, the project tackles a key dependency, `lzma-native`, a compression library used by the IDE, which currently [**lacks support for Windows on Arm**](https://github.com/addaleax/lzma-native/issues/132) Previous attempts to build `lzma-native` on WoA failed due to architecture-specific compilation issues and native module bindings (`node-gyp`, `liblzma`, etc.). - - ### Key Objectives: - - Successfully build and run the [Arduino IDE](https://github.com/arduino/arduino-ide) on Windows on Arm. - - Patch or fork [`lzma-native`](https://github.com/addaleax/lzma-native) to enable full compatibility on WoA. - - Benchmark IDE performance and memory usage on Arm64 vs. x64 emulation. - - Submit upstream patches and document issues to support long-term ecosystem health. - - This project aligns strongly with Arm’s mission to expand native software compatibility on Arm-based Windows devices. 
It provides students with a **deep dive into cross-platform development, native module compilation, and Arm architecture optimization**, making it ideal for CV building, community contribution, and real-world system-level experience.
-
-  ## Prerequisites
-
-
-  - Familiarity with JavaScript (Node.js), TypeScript and C++ (lzma-native)
-  - Familiarity with or willingness to learn `CMake`, `Ninja`, `Visual Studio with C++ Desktop Dev`, UTM
-  - Basic understanding of terminal programs such as `Windows Terminal`, `PowerShell` and `WSL2`
-  - Access to a physical Windows on Arm device or a [WoA Virtual Machine running through UTM](https://mac.getutm.app/gallery/windows-11-arm). See the [Linaro Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) for more information.
-
-
-  ## Resources from Arm and our partners
-
-  - Repository: [Arduino IDE GitHub repo](https://github.com/arduino/arduino-ide)
-  - Repository: [lzma-native GitHub repo](https://github.com/addaleax/lzma-native)
-  - External Documentation: [Issue #132 – lzma-native Windows Arm64 build failure](https://github.com/addaleax/lzma-native/issues/132)
-  - Documentation: Arm’s official [Learn on Arm](https://learn.arm.com/) platform
-  - External Documentation: [Windows on Arm Environments – Linaro wiki](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
-  - Documentation: [Node.js native addon guides](https://nodejs.org/api/addons.html)
-
-  ## Support Level
-
-  This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-  ## Benefits
-
-  Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description - -**Why this is important?** - -Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, consumers will expect both performance and battery efficiency across all WoA applications, such as the ArduinoIDE. - -**Project summary** - -This project focuses on **porting and optimising the Arduino IDE**—an essential open-source platform for embedded development to run natively and efficiently on Windows on Arm platforms. In addition, the project tackles a key dependency, `lzma-native`, a compression library used by the IDE, which currently [**lacks support for Windows on Arm**](https://github.com/addaleax/lzma-native/issues/132) Previous attempts to build `lzma-native` on WoA failed due to architecture-specific compilation issues and native module bindings (`node-gyp`, `liblzma`, etc.). - -### Key Objectives: -- Successfully build and run the [Arduino IDE](https://github.com/arduino/arduino-ide) on Windows on Arm. -- Patch or fork [`lzma-native`](https://github.com/addaleax/lzma-native) to enable full compatibility on WoA. -- Benchmark IDE performance and memory usage on Arm64 vs. x64 emulation. -- Submit upstream patches and document issues to support long-term ecosystem health. - -This project aligns strongly with Arm’s mission to expand native software compatibility on Arm-based Windows devices. 
It provides students with a **deep dive into cross-platform development, native module compilation, and Arm architecture optimization**, making it ideal for CV building, community contribution, and real-world system-level experience.
-
-## Prerequisites
-
-
-- Familiarity with JavaScript (Node.js), TypeScript and C++ (lzma-native)
-- Familiarity with or willingness to learn `CMake`, `Ninja`, `Visual Studio with C++ Desktop Dev`, UTM
-- Basic understanding of terminal programs such as `Windows Terminal`, `PowerShell` and `WSL2`
-- Access to a physical Windows on Arm device or a [WoA Virtual Machine running through UTM](https://mac.getutm.app/gallery/windows-11-arm). See the [Linaro Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) for more information.
-
-
-## Resources from Arm and our partners
-
-- Repository: [Arduino IDE GitHub repo](https://github.com/arduino/arduino-ide)
-- Repository: [lzma-native GitHub repo](https://github.com/addaleax/lzma-native)
-- External Documentation: [Issue #132 – lzma-native Windows Arm64 build failure](https://github.com/addaleax/lzma-native/issues/132)
-- Documentation: Arm’s official [Learn on Arm](https://learn.arm.com/) platform
-- External Documentation: [Windows on Arm Environments – Linaro wiki](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
-- Documentation: [Node.js native addon guides](https://nodejs.org/api/addons.html)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-bioinformatic-pipeline-analysis.md b/docs/_posts/2025-05-30-bioinformatic-pipeline-analysis.md deleted file mode 100644 index e9da6e8b..00000000 --- a/docs/_posts/2025-05-30-bioinformatic-pipeline-analysis.md +++ /dev/null @@ -1,126 +0,0 @@ ---- -title: Benchmarking Bioconda Packages for Arm64 in Bioinformatics Pipelines -description: This self-service project benchmarks Arm64 Bioconda packages in real nf-core workflows—measuring performance, diagnosing build failures, and proposing fixes that accelerate truly native bioinformatics on the expanding fleet of Arm-powered machines. -subjects: -- Performance and Architecture -- Databases -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ### Description - - **Why this is important?** - - Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops running Linux and MacOS. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. 
Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). While many leading Bioconda packages now support Linux/Arm, there are still emulated components that can be the bottleneck.
-
-  **Project summary**
-
-  This project aims to benchmark specific Bioconda packages that have been built for Arm64 using the nf-core-arm-discovery repository. The participant will utilize public genomic datasets from databases such as NCBI, select appropriate datasets, and execute bioinformatics workflows on Arm-based infrastructure. The candidate will evaluate the performance, compatibility, and efficiency of these packages, document errors and failures, and investigate the reasons behind package build failures. The final deliverable will be a detailed report with performance metrics, identified issues, and recommended improvements to enhance package support on Arm64.
-
-  The deliverables of the project are as follows:
-
-  - Selection and justification of public genomic datasets.
-  - Execution of bioinformatics workflows using Bioconda packages on Arm64.
-  - Performance benchmarking and comparison with x86 architectures.
-  - Documentation of failed package builds and proposed fixes.
-  - Comprehensive report with results, analysis, and recommendations.
-
-
-  ## Prerequisites
-
-  - Intermediate understanding of Python, Bash and Nextflow
-  - Basic experience with nf-core pipelines, Conda, Docker/Singularity, Snakemake
-  - Access to Arm64-based cloud instances (e.g., AWS Graviton) with plenty of memory and storage
-  - IP access to public genomic databases (NCBI, ENA, etc.) 
- - ## Resources from Arm and our partners - - - External Documentation: [nf-core documentation](https://nf-co.re/docs/) - - - External Documentation: [AWS Graviton documentation](https://aws.amazon.com/ec2/graviton/) - - - Repository: [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) - - - Repository: [Bioconda package repository](https://bioconda.github.io/) - - - Dataset: [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -### Description - -**Why this is important?** - -Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops running Linux and MacOS. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). 
While many leading Bioconda packages now support Linux/Arm, there are still emulated components that can be the bottleneck.
-
-**Project summary**
-
-This project aims to benchmark specific Bioconda packages that have been built for Arm64 using the nf-core-arm-discovery repository. The participant will utilize public genomic datasets from databases such as NCBI, select appropriate datasets, and execute bioinformatics workflows on Arm-based infrastructure. The candidate will evaluate the performance, compatibility, and efficiency of these packages, document errors and failures, and investigate the reasons behind package build failures. The final deliverable will be a detailed report with performance metrics, identified issues, and recommended improvements to enhance package support on Arm64.
-
-The deliverables of the project are as follows:
-
-- Selection and justification of public genomic datasets.
-- Execution of bioinformatics workflows using Bioconda packages on Arm64.
-- Performance benchmarking and comparison with x86 architectures.
-- Documentation of failed package builds and proposed fixes.
-- Comprehensive report with results, analysis, and recommendations.
-
-
-## Prerequisites
-
-- Intermediate understanding of Python, Bash and Nextflow
-- Basic experience with nf-core pipelines, Conda, Docker/Singularity, Snakemake
-- Access to Arm64-based cloud instances (e.g., AWS Graviton) with plenty of memory and storage
-- IP access to public genomic databases (NCBI, ENA, etc.) 
- -## Resources from Arm and our partners - -- External Documentation: [nf-core documentation](https://nf-co.re/docs/) - -- External Documentation: [AWS Graviton documentation](https://aws.amazon.com/ec2/graviton/) - -- Repository: [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) - -- Repository: [Bioconda package repository](https://bioconda.github.io/) - -- Dataset: [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) - - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-compliance-ready-smart-camera-system.md b/docs/_posts/2025-05-30-compliance-ready-smart-camera-system.md deleted file mode 100644 index 539ab464..00000000 --- a/docs/_posts/2025-05-30-compliance-ready-smart-camera-system.md +++ /dev/null @@ -1,97 +0,0 @@ ---- -title: End-to-End Computer Vision System for Functional Safety -description: This challenge will create and validate an Arm-based, smart camera pipeline on virtual automotive hardware—advancing safer, more developer-friendly driver-monitoring solutions for next-generation vehicles. 
-subjects: -- Security -- Embedded Linux -- ML -- Virtual Hardware -requires-team: -- Yes -platform: -- Mobile, Graphics, and Gaming -- Automotive -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -- Direct Support from Arm -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. - - - ## Description - - **Why this is important?** - - As of June 2025, [94% of global automakers](https://newsroom.arm.com/blog/arm-zena-css-ai-defined-vehicle-compute-platform) use Arm vehicle technology. The development of compliance-ready smart camera systems is crucial for Arm as it aligns with the increasing demand for safety and reliability in automotive platforms, see the [Arm Zena compute subsystem(CSS)](https://www.arm.com/products/automotive/compute-subsystems/zena) for more details. - - **Project summary** - - Government and external regulations, particularly in automotive industries, are shaping how smart camera products are designed, implemented, and verified. This project will explore how compliance with standards like ISO 26262 influences the design of smart camera systems, such as Driver Monitoring Systems (DMS) to ensure driver attentiveness. Students or engineers will collaborate with industry-relevant platforms, such as Arm's Kronos Fixed Virtual Platform (FVP) for automotive development, to understand and propose frictionless developer experiences aligned with functional safety standards. 
- - Deliverables include: - - Developing a full end-to-end smart camera system that can be incorporated into a functional safety environment (such as automotive or medical), following the relevant standards for development (e.g., ISO 26262) - - A survey of regulatory requirements and their impact on smart camera design - - An architectural analysis integrating Arm-based systems into a compliant automotive software stack - - Recommendations for enhancing developer tools and reference software stacks to align with ISO standards - - ## Estimated Project Duration - - Estimated Time: 6+ months - - Participants: Team of 2+ - - ## Resources from Arm and Arm partners - - Learning Paths - [Automotive Development](https://www.arm.com/resources/learning-paths/automotive) - - Software - Arm Automotive Reference Platforms (e.g., [Kronos FVP](https://arm-auto-solutions.docs.arm.com/en/v1.0/overview.html)) - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. - - -## Description - -**Why this is important?** - -As of June 2025, [94% of global automakers](https://newsroom.arm.com/blog/arm-zena-css-ai-defined-vehicle-compute-platform) use Arm vehicle technology. 
The development of compliance-ready smart camera systems is crucial for Arm as it aligns with the increasing demand for safety and reliability in automotive platforms, see the [Arm Zena compute subsystem(CSS)](https://www.arm.com/products/automotive/compute-subsystems/zena) for more details. - -**Project summary** - -Government and external regulations, particularly in automotive industries, are shaping how smart camera products are designed, implemented, and verified. This project will explore how compliance with standards like ISO 26262 influences the design of smart camera systems, such as Driver Monitoring Systems (DMS) to ensure driver attentiveness. Students or engineers will collaborate with industry-relevant platforms, such as Arm's Kronos Fixed Virtual Platform (FVP) for automotive development, to understand and propose frictionless developer experiences aligned with functional safety standards. - -Deliverables include: -- Developing a full end-to-end smart camera system that can be incorporated into a functional safety environment (such as automotive or medical), following the relevant standards for development (e.g., ISO 26262) -- A survey of regulatory requirements and their impact on smart camera design -- An architectural analysis integrating Arm-based systems into a compliant automotive software stack -- Recommendations for enhancing developer tools and reference software stacks to align with ISO standards - -## Estimated Project Duration -- Estimated Time: 6+ months -- Participants: Team of 2+ - -## Resources from Arm and Arm partners -- Learning Paths - [Automotive Development](https://www.arm.com/resources/learning-paths/automotive) -- Software - Arm Automotive Reference Platforms (e.g., [Kronos FVP](https://arm-auto-solutions.docs.arm.com/en/v1.0/overview.html)) - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-fpga-accellerator-with-ddr.md b/docs/_posts/2025-05-30-fpga-accellerator-with-ddr.md deleted file mode 100644 index 41b544c1..00000000 --- a/docs/_posts/2025-05-30-fpga-accellerator-with-ddr.md +++ /dev/null @@ -1,76 +0,0 @@ ---- -title: Linux Capable SoC FPGA Prototyping Platform with DDR Memory -description: This self-service project takes Arm Corstone-1000 from FPGA to silicon, delivering a DDR-backed, Linux-ready SoC platform that lets researchers plug in and evaluate custom accelerators with real-world performance. -subjects: -- Virtual Hardware -- Performance and Architecture -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -sw-hw: -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - This project aims to leverage the Corstone-1000 platform to host and support a custom research accelerator. The main deliverables include prototyping the accelerator in FPGA and then creating a physical silicon implementation using a pre-verified programmable control system. The project will provide practical experience in SoC design, FPGA prototyping, and hardware acceleration. The final output will be a functional SoC FPGA prototyping platform with DDR memory, capable of running Linux and demonstrating the feasibility and performance of the design. **To undertake this project, please reach out to SoC Labs** in [this link](https://soclabs.org/). 
- - - ## Prequisites - - - Languages: Verilog, SystemVerilog - - Tooling: Vivado, ModelSim, ASIC design tools - - Hardware: FPGA development board (e.g., Xilinx or Altera), Corstone-1000 platform - - IP access: Arm Academic Access member (link to get if they don't have it) - - ## Resources from Arm and our partners - - - External Community: [SoC Labs, community for Arm-based software development](https://soclabs.org/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -## Description -This project aims to leverage the Corstone-1000 platform to host and support a custom research accelerator. The main deliverables include prototyping the accelerator in FPGA and then creating a physical silicon implementation using a pre-verified programmable control system. The project will provide practical experience in SoC design, FPGA prototyping, and hardware acceleration. The final output will be a functional SoC FPGA prototyping platform with DDR memory, capable of running Linux and demonstrating the feasibility and performance of the design. **To undertake this project, please reach out to SoC Labs** in [this link](https://soclabs.org/). 
- - -## Prequisites - -- Languages: Verilog, SystemVerilog -- Tooling: Vivado, ModelSim, ASIC design tools -- Hardware: FPGA development board (e.g., Xilinx or Altera), Corstone-1000 platform -- IP access: Arm Academic Access member (link to get if they don't have it) - -## Resources from Arm and our partners - -- External Community: [SoC Labs, community for Arm-based software development](https://soclabs.org/) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-haskell-compiler-windows-on-arm.md b/docs/_posts/2025-05-30-haskell-compiler-windows-on-arm.md deleted file mode 100644 index 137983af..00000000 --- a/docs/_posts/2025-05-30-haskell-compiler-windows-on-arm.md +++ /dev/null @@ -1,125 +0,0 @@ ---- -title: Adding Windows on Arm Support to the Glasgow Haskell Compiler (GHC) -description: This self-service project brings native Glasgow Haskell Compiler support to Windows on Arm—unlocking efficient Arm-laptop builds, extending Haskell’s reach, and giving contributors hands-on experience with Arm64 code generation and runtime integration. 
-subjects: -- Migration to Arm -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why this is important?** - - The Glasgow Haskell Compiler (GHC) is the de facto standard compiler for Haskell, an advanced purely functional programming language with strong type inference and lazy evaluation. This project aims to **port GHC to support Windows on Arm (WoA)**, a platform that is increasingly relevant with the rise of Arm-powered laptops and developer kits. Arm anticipates more original equipment manufacturers (OEMs) to be available in the coming years. - - - **Project summary** - - Currently, GHC lacks robust support for WoA, hindering Haskell’s reach in energy-efficient and mobile-native environments (Request for support has [previously been requested by the community](https://gitlab.haskell.org/ghc/ghc/-/issues/24603)). The goal is to bridge this gap by: - - Enabling native compilation of Haskell code via GHC on WoA. - - Implementing and testing architecture-specific assembly and intrinsic functions. - - Extending the GHC build system to recognize WoA environments. - - Integrating and validating linker and runtime support on Arm-based Windows systems. - - The project requires in-depth familiarity with compiler backends, calling conventions, code generation pipelines, and the use of LLVM or native code generators. Students will also gain experience in cross-compilation, Windows PE/COFF linking, and performance benchmarking on Arm CPUs. - - The work has potential for real-world deployment and academic publishing, and would be of high value to the Haskell and Arm developer ecosystems. 
- - --- - - ## Prequisites - - - Advanced understanding of Haskell (including Template Haskell, Core-to-STG pipeline understanding) - - Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) - - Intemediate understanding of Arm64 Assembly (AArch64) - - Comfortable using compilers such as LLVM and Clang for backend work (if using LLVM codegen) - - Access to MSYS2 / CMake / Ninja for Windows builds - - - ## Resources from Arm and our partners - - - External Documentation: [GHC Development Wiki](https://gitlab.haskell.org/ghc/ghc/-/wikis/) - - Repository: [GHC source tree](https://gitlab.haskell.org/ghc/ghc) - - External Documentation: [Linaro WoA Support Documentation](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description - -**Why this is important?** - -The Glasgow Haskell Compiler (GHC) is the de facto standard compiler for Haskell, an advanced purely functional programming language with strong type inference and lazy evaluation. 
This project aims to **port GHC to support Windows on Arm (WoA)**, a platform that is increasingly relevant with the rise of Arm-powered laptops and developer kits. Arm anticipates more original equipment manufacturers (OEMs) to be available in the coming years. - - -**Project summary** - -Currently, GHC lacks robust support for WoA, hindering Haskell’s reach in energy-efficient and mobile-native environments (Request for support has [previously been requested by the community](https://gitlab.haskell.org/ghc/ghc/-/issues/24603)). The goal is to bridge this gap by: -- Enabling native compilation of Haskell code via GHC on WoA. -- Implementing and testing architecture-specific assembly and intrinsic functions. -- Extending the GHC build system to recognize WoA environments. -- Integrating and validating linker and runtime support on Arm-based Windows systems. - -The project requires in-depth familiarity with compiler backends, calling conventions, code generation pipelines, and the use of LLVM or native code generators. Students will also gain experience in cross-compilation, Windows PE/COFF linking, and performance benchmarking on Arm CPUs. - -The work has potential for real-world deployment and academic publishing, and would be of high value to the Haskell and Arm developer ecosystems. 
-
-
----
-
-## Prerequisites
-
-- Advanced understanding of Haskell (including Template Haskell, Core-to-STG pipeline understanding)
-- Arm64 Windows device or access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
-- Intermediate understanding of Arm64 Assembly (AArch64)
-- Comfortable using compilers such as LLVM and Clang for backend work (if using LLVM codegen)
-- Access to MSYS2 / CMake / Ninja for Windows builds
-
-
-## Resources from Arm and our partners
-
-- External Documentation: [GHC Development Wiki](https://gitlab.haskell.org/ghc/ghc/-/wikis/)
-- Repository: [GHC source tree](https://gitlab.haskell.org/ghc/ghc)
-- External Documentation: [Linaro WoA Support Documentation](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
-
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
-
-To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-hpc-algorithm.md b/docs/_posts/2025-05-30-hpc-algorithm.md deleted file mode 100644 index b528b8f0..00000000 --- a/docs/_posts/2025-05-30-hpc-algorithm.md +++ /dev/null @@ -1,95 +0,0 @@ ---- -title: Optimise Performance of an Algorithm Used in High-Performance Compute Using Scalable Vector Extensions (SVE / SVE2) -description: This self-service project is around finding a HPC algorithm and accelerating it with Arm’s SVE/SVE2 vectorization—demonstrating how next-generation Arm hardware can deliver significant, scalable performance gains. -subjects: -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -badges: trending -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - **Why this is important?** - - Scalable Vector Extension (SVE) is a vector extension the A64 instruction set of the Armv8-A architecture. Armv9-A builds on SVE with the SVE2 extension. Unlike other single-instruction multiple data (SIMD) architectures, SVE and SVE2 do not define the size of the vector registers, but constrains it to a range of possible values, from a minimum of 128 bits up to a maximum of 2048 in 128-bit wide units. Therefore, any CPU vendor can implement the extension by choosing the vector register size that better suits the workloads the CPU is targeting. As of J there is growing availablity of SVE-enabled hardware, such as through cloud service providers. However, not all software has taken advantage of this feature. As such there is potential performance improvements available to software libraries and applications that add support for SVE/SVE2. 
- - **Project summary** - - This project aims to identify and optimize the performance of an algorithm used in high-performance computing (HPC) by leveraging Scalable Vector Extensions (SVE) instructions. The main deliverable is an optimized version of the chosen algorithm that demonstrates a performance improvements using SVE. This project will provide practical experience in HPC, vectorization, and performance optimization. The final output will be a detailed report and a functional implementation of the optimized algorithm. - - ## Prequisites - - - Intermediate undestanding of C, C++ or Fortran. - - Experience with high performance compute (HPC). - - Basic understanding of compilers such as Arm Compiler for HPC, or autovectorising compiler such as GCC. - - Access to Arm-based servers or SVE-enabled hardware - - ## Resources from Arm and our partners - - - Learning path: [Port Code to SVE](https://learn.arm.com/learning-paths/servers-and-cloud-computing/sve/) - - Learning path: [Migrate applications that use performance libraries](https://learn.arm.com/learning-paths/servers-and-cloud-computing/using-and-porting-performance-libs/) - - Documentation: [SVE Programmers Guide](https://developer.arm.com/documentation/102476/0101/Programming-with-SVE) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -## Description - -**Why this is important?** - -Scalable Vector Extension (SVE) is a vector extension the A64 instruction set of the Armv8-A architecture. Armv9-A builds on SVE with the SVE2 extension. Unlike other single-instruction multiple data (SIMD) architectures, SVE and SVE2 do not define the size of the vector registers, but constrains it to a range of possible values, from a minimum of 128 bits up to a maximum of 2048 in 128-bit wide units. Therefore, any CPU vendor can implement the extension by choosing the vector register size that better suits the workloads the CPU is targeting. As of J there is growing availablity of SVE-enabled hardware, such as through cloud service providers. However, not all software has taken advantage of this feature. As such there is potential performance improvements available to software libraries and applications that add support for SVE/SVE2. - -**Project summary** - -This project aims to identify and optimize the performance of an algorithm used in high-performance computing (HPC) by leveraging Scalable Vector Extensions (SVE) instructions. The main deliverable is an optimized version of the chosen algorithm that demonstrates a performance improvements using SVE. This project will provide practical experience in HPC, vectorization, and performance optimization. The final output will be a detailed report and a functional implementation of the optimized algorithm. - -## Prequisites - -- Intermediate undestanding of C, C++ or Fortran. -- Experience with high performance compute (HPC). -- Basic understanding of compilers such as Arm Compiler for HPC, or autovectorising compiler such as GCC. 
-- Access to Arm-based servers or SVE-enabled hardware - -## Resources from Arm and our partners - -- Learning path: [Port Code to SVE](https://learn.arm.com/learning-paths/servers-and-cloud-computing/sve/) -- Learning path: [Migrate applications that use performance libraries](https://learn.arm.com/learning-paths/servers-and-cloud-computing/using-and-porting-performance-libs/) -- Documentation: [SVE Programmers Guide](https://developer.arm.com/documentation/102476/0101/Programming-with-SVE) - - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-human-centric-robotics.md b/docs/_posts/2025-05-30-human-centric-robotics.md deleted file mode 100644 index 0c04efba..00000000 --- a/docs/_posts/2025-05-30-human-centric-robotics.md +++ /dev/null @@ -1,113 +0,0 @@ ---- -title: Human-Centric Robotics – Urban Deployment & Socioeconomic Modelling -description: This team project will build and test an Arm-based urban service robot—merging real-time navigation, vision-guided manipulation, and human interaction—and model its socioeconomic impact to show how Arm platforms can transform last-mile delivery, eldercare, or other city services. 
-subjects: -- ML -- Embedded Linux -- RTOS Fundamentals -requires-team: -- Yes -platform: -- Automotive -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -- Direct Support from Arm -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. - - ## Description - - **Why this is important?** - - Arm is transitioning from traditional IP to providing platforms for the AI era, please see a recent [news post](https://newsroom.arm.com/news/new-arm-product-naming-architecture) for more details. This project is crucial for Arm as it showcases the versatility of Arm-based platforms in real-world applications, enhancing their relevance in the robotics sector. Furthermore, the insights gained from this project can inform future developments and partnerships and share future platforms related to robotics. - - **Project Summary** - - This project challenges students to design, build, and evaluate a human-centric robotic system for urban deployment using Arm-based compute platforms such as Raspberry Pi 5, NVIDIA Jetson Orin Nano, etc. The primary focus is on deploying prototypes in a controlled campus environment for applications like last-mile delivery, eldercare assistance, or smart waste collection. - - - Participants will integrate real-time navigation, object manipulation, and human-interaction modules using state-of-the-art computer vision and sensor fusion frameworks. The second phase involves simulating or evaluating the robot’s impact on urban workflows and labor markets, including surveys or socioeconomic modeling techniques (e.g., system dynamics or agent-based simulation). 
- - Potential Deliverables include: - - A working prototype running on an Arm-based platform - - Software stack (navigation, ML inference, interaction logic) - - Field evaluation results & UX data (e.g., survey or usage logs) - - Report of development process and considerations when prototyping an end-user product. - - A socioeconomic impact report using modeling or simulation techniques - - *Note: Arm does not offer direct channels to municipalities or public testing environments. Projects should focus on campus deployments, simulated environments (e.g., Gazebo).* - - ## Estimated Project Duration - - 6+ months - Team size: 2+ participants - - ## Prerequisites - - - **Languages**: Familiarity with an OOP language. - - **Hardware**: - - **IP/Cloud Access**: - - Any cloud service provider with Arm-based instances (for model training or data analysis) - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. - -## Description - -**Why this is important?** - -Arm is transitioning from traditional IP to providing platforms for the AI era, please see a recent [news post](https://newsroom.arm.com/news/new-arm-product-naming-architecture) for more details. This project is crucial for Arm as it showcases the versatility of Arm-based platforms in real-world applications, enhancing their relevance in the robotics sector. 
Furthermore, the insights gained from this project can inform future developments and partnerships and share future platforms related to robotics. - -**Project Summary** - -This project challenges students to design, build, and evaluate a human-centric robotic system for urban deployment using Arm-based compute platforms such as Raspberry Pi 5, NVIDIA Jetson Orin Nano, etc. The primary focus is on deploying prototypes in a controlled campus environment for applications like last-mile delivery, eldercare assistance, or smart waste collection. - - -Participants will integrate real-time navigation, object manipulation, and human-interaction modules using state-of-the-art computer vision and sensor fusion frameworks. The second phase involves simulating or evaluating the robot’s impact on urban workflows and labor markets, including surveys or socioeconomic modeling techniques (e.g., system dynamics or agent-based simulation). - -Potential Deliverables include: -- A working prototype running on an Arm-based platform -- Software stack (navigation, ML inference, interaction logic) -- Field evaluation results & UX data (e.g., survey or usage logs) -- Report of development process and considerations when prototyping an end-user product. -- A socioeconomic impact report using modeling or simulation techniques - -*Note: Arm does not offer direct channels to municipalities or public testing environments. Projects should focus on campus deployments, simulated environments (e.g., Gazebo).* - -## Estimated Project Duration - -6+ months -Team size: 2+ participants - -## Prerequisites - -- **Languages**: Familiarity with an OOP language. -- **Hardware**: -- **IP/Cloud Access**: - - Any cloud service provider with Arm-based instances (for model training or data analysis) - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-llm-benchmark-on-arm-server.md b/docs/_posts/2025-05-30-llm-benchmark-on-arm-server.md deleted file mode 100644 index 31f91a63..00000000 --- a/docs/_posts/2025-05-30-llm-benchmark-on-arm-server.md +++ /dev/null @@ -1,79 +0,0 @@ ---- -title: LLM Benchmark for Arm Server -description: This self-service project sets up a reproducible MLPerf Inference workflow to benchmark large-language-model performance across Arm server configurations—yielding hard data that guides optimization of Arm hardware and software stacks for AI workloads. -subjects: -- ML -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - This project aims to benchmark inference on Arm-based servers using the MLPerf Inference benchmark suite. The project spans performance analysis across different configurations of Arm-based servers. The main deliverable is a comprehensive benchmarking setup that can evaluate the performance of large language models (LLMs) on various Arm server configurations in addition to a report highlighting the performance difference and how to recreate the results. This project will provide practical experience in benchmarking, performance analysis, and working with Arm-based server architectures. 
The final output will be a detailed report and a functional benchmarking infrastructure that can be used for further research and development. - - - ## Prequisites - - - Intermediate understanding of Python and C++ - - Intemediate understanding of ML frameworks such as MLPerf, TensorFlow and PyTorch - - Access to physcial Arm-based server or access to cloud service providers - - ## Resources from Arm and our partners - - - Repository: [MLPerf Inference ](https://github.com/mlcommons/inference) - - External Documentation: [MLPerf Inference Benchmark Suite](https://mlcommons.org/en/inference-datacenter-20/) - - Blog: [Arm Server inference performance](https://community.arm.com/arm-community-blogs/b/servers-and-cloud-computing-blog/posts/machine-learning-inference-on-aws-graviton3) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -## Description -This project aims to benchmark inference on Arm-based servers using the MLPerf Inference benchmark suite. The project spans performance analysis across different configurations of Arm-based servers. 
The main deliverable is a comprehensive benchmarking setup that can evaluate the performance of large language models (LLMs) on various Arm server configurations in addition to a report highlighting the performance difference and how to recreate the results. This project will provide practical experience in benchmarking, performance analysis, and working with Arm-based server architectures. The final output will be a detailed report and a functional benchmarking infrastructure that can be used for further research and development. 
- 
- 
-## Prerequisites 
- 
-- Intermediate understanding of Python and C++ 
-- Intermediate understanding of ML frameworks such as MLPerf, TensorFlow and PyTorch 
-- Access to physical Arm-based server or access to cloud service providers 
- 
-## Resources from Arm and our partners 
- 
-- Repository: [MLPerf Inference ](https://github.com/mlcommons/inference) 
-- External Documentation: [MLPerf Inference Benchmark Suite](https://mlcommons.org/en/inference-datacenter-20/) 
-- Blog: [Arm Server inference performance](https://community.arm.com/arm-community-blogs/b/servers-and-cloud-computing-blog/posts/machine-learning-inference-on-aws-graviton3) 
- 
-## Support Level 
- 
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). 
- 
-## Benefits 
- 
-Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- 
- 
-To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-machine-learning-on-aws-graviton.md b/docs/_posts/2025-05-30-machine-learning-on-aws-graviton.md deleted file mode 100644 index 148ccb56..00000000 --- a/docs/_posts/2025-05-30-machine-learning-on-aws-graviton.md +++ /dev/null @@ -1,111 +0,0 @@ ---- -title: Efficient Inference of text-to-video (OpenSora) on AWS Graviton Instances -description: This self-service project ports and tunes OpenSora text-to-video transformers on AWS Graviton CPUs—showcasing cost-efficient, quantized, CPU-only inference pipelines and guiding best-practice optimization for Arm-based cloud AI workloads. -subjects: -- ML -- Migration to Arm -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Description - - **Why is this important?** - - This project investigates the deployment and optimization of text-to-video transformer models on Arm-based instances, leveraging CPU-only execution for cost-effective and scalable inference. Vision Transformers, though typically run on GPUs, are increasingly desire to operate in resource-constrained environments for power efficiency. - - - **Project Summary** - - The aim of this project is to port, benchmark, and optimize a pre-trained ViT model (e.g., OpenSora) on Arm-based instances. This could include post-training quantization and investigation in how to speed up performance. Students will explore efficiency techniques such as INT8 quantization, refactoring of expensive operations, and memory-efficient transformer kernels, and compare results across GPU and CPU platforms. Deliverables include a reproducable inference pipeline and a technical report outlining bottlenecks and optimization strategies. 
- - ## Prequisites - - - Intemediate understanding of Python. - - Understanding of transformer architectures, vision transformer architectures and inference optimization - - Experience using PyTorch or ONNX Runtime (CPU execution provider) - - Experience with libraries such as Hugging Face Transformers, torchvision - - Access to Arm-based instances such as AWS Graviton3/Graviton4 (`c7g`, `m7g`, or `r7g`) - - Familiarity with Linux, Docker, and cloud environments - - - ## Resources from Arm and our partners - - - - Learning Paths: [Arm AI Learning Paths](https://learn.arm.com/tag/ml) - - Repository: [AWS Machine Learning Guide](https://github.com/aws/aws-graviton-getting-started/tree/main/machinelearning) - - Blog: [AWS SageMaker](https://aws.amazon.com/blogs/machine-learning/run-machine-learning-inference-workloads-on-aws-graviton-based-instances-with-amazon-sagemaker/) - - External Documentation: [OpenSora Documentation](https://github.com/hpcaitech/Open-Sora) - - Repository: [GGML library](https://github.com/ggml-org/ggml) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---- - - -## Description - -**Why is this important?** - -This project investigates the deployment and optimization of text-to-video transformer models on Arm-based instances, leveraging CPU-only execution for cost-effective and scalable inference. Vision Transformers, though typically run on GPUs, are increasingly desire to operate in resource-constrained environments for power efficiency. - - -**Project Summary** - -The aim of this project is to port, benchmark, and optimize a pre-trained ViT model (e.g., OpenSora) on Arm-based instances. This could include post-training quantization and investigation in how to speed up performance. Students will explore efficiency techniques such as INT8 quantization, refactoring of expensive operations, and memory-efficient transformer kernels, and compare results across GPU and CPU platforms. Deliverables include a reproducable inference pipeline and a technical report outlining bottlenecks and optimization strategies. - -## Prequisites - -- Intemediate understanding of Python. 
-- Understanding of transformer architectures, vision transformer architectures and inference optimization -- Experience using PyTorch or ONNX Runtime (CPU execution provider) -- Experience with libraries such as Hugging Face Transformers, torchvision -- Access to Arm-based instances such as AWS Graviton3/Graviton4 (`c7g`, `m7g`, or `r7g`) -- Familiarity with Linux, Docker, and cloud environments - - -## Resources from Arm and our partners - - -- Learning Paths: [Arm AI Learning Paths](https://learn.arm.com/tag/ml) -- Repository: [AWS Machine Learning Guide](https://github.com/aws/aws-graviton-getting-started/tree/main/machinelearning) -- Blog: [AWS SageMaker](https://aws.amazon.com/blogs/machine-learning/run-machine-learning-inference-workloads-on-aws-graviton-based-instances-with-amazon-sagemaker/) -- External Documentation: [OpenSora Documentation](https://github.com/hpcaitech/Open-Sora) -- Repository: [GGML library](https://github.com/ggml-org/ggml) - - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-processor-in-the-loop-automotive.md b/docs/_posts/2025-05-30-processor-in-the-loop-automotive.md deleted file mode 100644 index 02f20812..00000000 --- a/docs/_posts/2025-05-30-processor-in-the-loop-automotive.md +++ /dev/null @@ -1,121 +0,0 @@ ---- -title: Processor in the Loop Automotive Controller on an Arm Cortex M7 Fast Model -description: Verify a Simulink automotive controller by running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor. -subjects: -- Embedded Linux -- RTOS Fundamentals -- Virtual Hardware -requires-team: -- No -platform: -- Laptops and Desktops -- Automotive -- Embedded and Microcontrollers -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why this is important** - - Modern automotive software development requires early verification of embedded software to meet performance and safety standards. Engineers must validate control algorithms long before physical ECUs are available. Processor in the Loop (PIL) verification bridges this gap by executing auto-generated embedded C code on a virtual processor while checking for functional equivalence and timing compliance. This challenge mirrors the real-world V-Model verification process used by OEMs and Tier 1 suppliers, offering hands-on experience in model-based design, embedded code generation, and in-the-loop testing on a virtual Arm Cortex M7 core.Modern vehicles demand rigorous, early-stage verification of embedded software to satisfy safety, performance and homologation requirements. Waiting for physical ECUs delays feedback and drives up cost; virtual processor testing closes this gap. 
- - **Project summary** - - Start with a prebuilt Simulink automotive control model and drive it through a complete model-based development and verification workflow. This includes defining detailed software requirements, designing test scenarios, generating C code from the controller subsystem, running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor, analyzing execution time, and publishing a complete verification report. - - ## Prequisites - - - [MATLAB & Simulink License](https://uk.mathworks.com/pricing-licensing.html?prodcode=ML&intendeduse=edu) - - Familiarity with C/C++, Simulink, Stateflow and Embedded Coder - - Familiarity with Processor-in-the-Loop (PIL), Code Profile Analyzer - - Understanding of automotive software development such as V-Model lifecycle methodology. - - - ## Resources from Arm and our partners - - - Built-in Simulink Automotive Example: [Automatic Climate Control](https://www.mathworks.com/help/simulink/slref/simulating-automatic-climate-control-systems.html) - - Built-in Simulink Automotive Example: [Tire Pressure Monitoring System (TPMS)]( https://www.mathworks.com/help/simulink/ug/wirelesss-tire-pressure-monitoring-system-with-fault-logging.html) - - Built-in Simulink Automotive Example: [Anti-Lock Braking System (ABS)]( https://www.mathworks.com/help/simulink/slref/modeling-an-anti-lock-braking-system.html) - - Define Requirements: [Requirements Toolbox™](https://www.mathworks.com/products/requirements-toolbox.html) - - Code Generation: [MathWorks Embedded Coder®](https://uk.mathworks.com/products/embedded-coder.html) - - Model-in-the-Loop Test: [Simulink Test™](https://www.mathworks.com/help/sltest/index.html?s_tid=CRUX_lftnav) - - Measure Code Coverage: [Simulink Coverage™](https://www.mathworks.com/help/slcoverage/index.html) - - Hardware Implementation: [Arm Cortex M7 (Fast Model)](https://developer.arm.com/Tools%20and%20Software/Fast%20Models), [Embedded Coder Support Package for Arm Cortex M Fast Models]( 
https://www.mathworks.com/hardware-support/arm-cortex-m.html) - - Conduct Execution Profiling: [Code Profile Analyzer](https://www.mathworks.com/help/ecoder/ref/codeprofileanalyzer-app.html) - - Perform Static Code Analysis: [Polyspace®](https://www.mathworks.com/products/polyspace.html) - - Documentation: [MATLAB and Simulink for Verification and Validation](https://www.mathworks.com/solutions/verification-validation.html) - - Documentation: [ARM Cortex-M Support from Embedded Coder]( https://www.mathworks.com/hardware-support/arm-cortex-m.html) - - Documentation: [Arm Fast Models](https://uk.mathworks.com/products/connections/product_detail/arm-fast-models.html) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description - -**Why this is important** - -Modern automotive software development requires early verification of embedded software to meet performance and safety standards. Engineers must validate control algorithms long before physical ECUs are available. Processor in the Loop (PIL) verification bridges this gap by executing auto-generated embedded C code on a virtual processor while checking for functional equivalence and timing compliance. 
This challenge mirrors the real-world V-Model verification process used by OEMs and Tier 1 suppliers, offering hands-on experience in model-based design, embedded code generation, and in-the-loop testing on a virtual Arm Cortex M7 core. Modern vehicles demand rigorous, early-stage verification of embedded software to satisfy safety, performance and homologation requirements. Waiting for physical ECUs delays feedback and drives up cost; virtual processor testing closes this gap. 

**Project summary**

Start with a prebuilt Simulink automotive control model and drive it through a complete model-based development and verification workflow. This includes defining detailed software requirements, designing test scenarios, generating C code from the controller subsystem, running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor, analyzing execution time, and publishing a complete verification report. 

## Prerequisites

- [MATLAB & Simulink License](https://uk.mathworks.com/pricing-licensing.html?prodcode=ML&intendeduse=edu)
- Familiarity with C/C++, Simulink, Stateflow and Embedded Coder
- Familiarity with Processor-in-the-Loop (PIL), Code Profile Analyzer
- Understanding of automotive software development such as V-Model lifecycle methodology. 
- - -## Resources from Arm and our partners - -- Built-in Simulink Automotive Example: [Automatic Climate Control](https://www.mathworks.com/help/simulink/slref/simulating-automatic-climate-control-systems.html) -- Built-in Simulink Automotive Example: [Tire Pressure Monitoring System (TPMS)]( https://www.mathworks.com/help/simulink/ug/wirelesss-tire-pressure-monitoring-system-with-fault-logging.html) -- Built-in Simulink Automotive Example: [Anti-Lock Braking System (ABS)]( https://www.mathworks.com/help/simulink/slref/modeling-an-anti-lock-braking-system.html) -- Define Requirements: [Requirements Toolbox™](https://www.mathworks.com/products/requirements-toolbox.html) -- Code Generation: [MathWorks Embedded Coder®](https://uk.mathworks.com/products/embedded-coder.html) -- Model-in-the-Loop Test: [Simulink Test™](https://www.mathworks.com/help/sltest/index.html?s_tid=CRUX_lftnav) -- Measure Code Coverage: [Simulink Coverage™](https://www.mathworks.com/help/slcoverage/index.html) -- Hardware Implementation: [Arm Cortex M7 (Fast Model)](https://developer.arm.com/Tools%20and%20Software/Fast%20Models), [Embedded Coder Support Package for Arm Cortex M Fast Models]( https://www.mathworks.com/hardware-support/arm-cortex-m.html) -- Conduct Execution Profiling: [Code Profile Analyzer](https://www.mathworks.com/help/ecoder/ref/codeprofileanalyzer-app.html) -- Perform Static Code Analysis: [Polyspace®](https://www.mathworks.com/products/polyspace.html) -- Documentation: [MATLAB and Simulink for Verification and Validation](https://www.mathworks.com/solutions/verification-validation.html) -- Documentation: [ARM Cortex-M Support from Embedded Coder]( https://www.mathworks.com/hardware-support/arm-cortex-m.html) -- Documentation: [Arm Fast Models](https://uk.mathworks.com/products/connections/product_detail/arm-fast-models.html) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are 
part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-projects.md b/docs/_posts/2025-05-30-projects.md deleted file mode 100644 index fede046c..00000000 --- a/docs/_posts/2025-05-30-projects.md +++ /dev/null @@ -1,13 +0,0 @@ ---- -title: projects -filter: project -publication-date: 2025-05-30 -layout: article -full_description: |- - **Arm Developer Labs** is a repository of software and hardware project suggestions sourced from internal Arm teams and our network of eco-system partners but framed in way to be immediately accessible and useful to both academics and professional software developers alike. - - Filter or keyword search below. You can take a project as is, adapt it to your circumstances or simply take inspiration from it. Each project link shows what contextual collateral and resources are available. To get benefits from Arm, you need to tell us about your work. Read more on the [homepage](https://arm-university.github.io/Arm-Developer-Labs/). ---- -**Arm Developer Labs** is a repository of software and hardware project suggestions sourced from internal Arm teams and our network of eco-system partners but framed in way to be immediately accessible and useful to both academics and professional software developers alike. - -Filter or keyword search below. 
You can take a project as is, adapt it to your circumstances or simply take inspiration from it. Each project link shows what contextual collateral and resources are available. To get benefits from Arm, you need to tell us about your work. Read more on the [homepage](https://arm-university.github.io/Arm-Developer-Labs/). \ No newline at end of file diff --git a/docs/_posts/2025-05-30-quantisation-aware-training.md b/docs/_posts/2025-05-30-quantisation-aware-training.md deleted file mode 100644 index 3a986fe2..00000000 --- a/docs/_posts/2025-05-30-quantisation-aware-training.md +++ /dev/null @@ -1,106 +0,0 @@ ---- -title: 'Quantization-Aware Training for Mobile Deployment: Deploying Lightweight Models on Arm' -description: This self-service project applies PyTorch quantization-aware training to compress and accelerate vision models for Arm-powered Android devices—enabling real-time, on-device AI while sharing the resulting lightweight models with the Hugging Face community. -subjects: -- ML -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - This open-ended project invites students to explore **Quantization-Aware Training (QAT)** with PyTorch to optimize computer vision models for **Arm-based mobile devices** (e.g., Android smartphones). - - The project centers on training a model using a **non-restrictively licensed dataset** and deploying it either on **Arm-powered mobile devices** (leveraging Android Neural Networks API ) - - Students will apply QAT to maintain accuracy while reducing model size and inference latency, making it suitable for real-time applications like: - - Sign language recognition for accessibility. 
- - Visual anomaly detection in manufacturing. - - Personal health and activity monitoring from camera feeds. - - The project encourages referencing work by contributing **optimized and quantized models for Arm platforms** on HuggingFace. The final quantized model will be **uploaded to HuggingFace** and may be submitted for listing in the [Arm on HuggingFace space](https://huggingface.co/Arm), encouraging open, community-supported contributions. - - ## Prequisites - - - **Languages**: Familiar with Python, pytorch and Java/Kotlin (if Android). - - **Frameworks**: Intermediate understanding of PyTorch - - **Tooling**: PyTorch Lightning, Android Studio - - **Hardware Options**: - - Android phone with Arm Cortex-A CPU or simulator through Android Studio. - - **Deployment Targets**: - - Android - - ## Resources from Arm and our partners - - - Documentation: [Quantization in PyTorch](https://pytorch.org/docs/stable/quantization.html) - - Blog: [Google Media Pipe](https://android-developers.googleblog.com/2024/10/bring-your-ai-model-to-android-devices.html) - - Datasets: [Hugging Face Datasets](https://huggingface.co/docs/datasets/en/index) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -## Description - -This open-ended project invites students to explore **Quantization-Aware Training (QAT)** with PyTorch to optimize computer vision models for **Arm-based mobile devices** (e.g., Android smartphones). - -The project centers on training a model using a **non-restrictively licensed dataset** and deploying it either on **Arm-powered mobile devices** (leveraging Android Neural Networks API) - -Students will apply QAT to maintain accuracy while reducing model size and inference latency, making it suitable for real-time applications like: -- Sign language recognition for accessibility. -- Visual anomaly detection in manufacturing. -- Personal health and activity monitoring from camera feeds. - -The project encourages referencing work by contributing **optimized and quantized models for Arm platforms** on HuggingFace. The final quantized model will be **uploaded to HuggingFace** and may be submitted for listing in the [Arm on HuggingFace space](https://huggingface.co/Arm), encouraging open, community-supported contributions. - -## Prerequisites - -- **Languages**: Familiar with Python, pytorch and Java/Kotlin (if Android). -- **Frameworks**: Intermediate understanding of PyTorch -- **Tooling**: PyTorch Lightning, Android Studio -- **Hardware Options**: - - Android phone with Arm Cortex-A CPU or simulator through Android Studio. 
-- **Deployment Targets**: - - Android - -## Resources from Arm and our partners - -- Documentation: [Quantization in PyTorch](https://pytorch.org/docs/stable/quantization.html) -- Blog: [Google Media Pipe](https://android-developers.googleblog.com/2024/10/bring-your-ai-model-to-android-devices.html) -- Datasets: [Hugging Face Datasets](https://huggingface.co/docs/datasets/en/index) -- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-r-arm-community-support.md b/docs/_posts/2025-05-30-r-arm-community-support.md deleted file mode 100644 index 56919c91..00000000 --- a/docs/_posts/2025-05-30-r-arm-community-support.md +++ /dev/null @@ -1,140 +0,0 @@ ---- -title: Improving R Support for the Windows on Arm Community -description: This self-service project boosts the R ecosystem on Windows on Arm by identifying unsupported packages, upstreaming fixes, and automating builds—so data scientists can run their workflows natively on fast, efficient Arm64 laptops and desktops. 
-subjects: -- Performance and Architecture -- Migration to Arm -- Libraries -requires-team: -- No -platform: -- Laptops and Desktops -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why this is important?** - - Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, developers will expect packages to be available for WoA so that downstream applications can more easily build for WoA platforms. - - **Project summary** - - - This project aims to significantly enhance the support for running **R packages on Windows 11 for Arm64 (WoA)** by identifying and contributing bug fixes / improvements to the relevant parts of the R community (e.g., CRAN/Bioconductor packages or even R Core / Rtools etc.). The project’s goals include: - - - - **Identifying CRAN and Bioconductor packages** lacking Windows/Arm64 support. - - **Proposing and testing patches upstream** for R packages that fail to build or run on WoA. - - **Engaging with the R development community** via the Windows Special interest group, [R-SIG-windows](https://stat.ethz.ch/mailman/listinfo/r-sig-windows), [R-Package-Devel](https://stat.ethz.ch/mailman/listinfo/r-package-devel) mailing list or the informal [R Contributors Slack](https://contributor.r-project.org/slack) and following the [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) to submit high-quality patches. 
- - **Reporting new issues**, requesting comments on proposed patches, documenting process via Bugzilla and reviewing existing issues through the [bug tracker](https://bugs.r-project.org/) - - **Tracking CI coverage** for recently announced public preview of [Windows11-Arm64 GitHub-hosted runners](https://github.blog/changelog/2025-04-14-windows-arm64-hosted-runners-now-available-in-public-preview/) and potentially proposing GitHub Actions or GitLab CI templates to automate WoA builds. - - Stretch Objectives: - - - **Identifying, Analyzing and fixing compatibility issues** in base R and Rtools for the Windows/Arm64 environment. This may involve waiting for improved upstream support from GCC for Windows-AArch64. A summary and progress is [available here](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/28802842658/MinGW+GNU+Toolchain). - - The deliverables include: - - - Patches, request for comments and bug reports the highest impact packages - - A curated list of packages with proposed WoA support status - - A short technical write-up describing the contributions and challenges - - ## Prequisites - - - Intermediate understanding of the R language - - Intermediate understanding of Rtools, Git and Docker for cross-compilation. - - Basic understanding or willingness to learn Bugzilla, Windows 11 operating system and GitHub CI/CD. - - Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments). 
- - ## Resources from Arm and our partners - - - Documentation: [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) - - Documentation: [R Bugzilla](https://bugs.r-project.org/) - - Documentation: [Rtools for Windows](https://cran.r-project.org/bin/windows/Rtools/) - - Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/) - - Documentation: Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors , who are part of the Arm Developer program and the R community. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description - -**Why this is important?** - -Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, developers will expect packages to be available for WoA so that downstream applications can more easily build for WoA platforms. 
- -**Project summary** - - -This project aims to significantly enhance the support for running **R packages on Windows 11 for Arm64 (WoA)** by identifying and contributing bug fixes / improvements to the relevant parts of the R community (e.g., CRAN/Bioconductor packages or even R Core / Rtools etc.). The project’s goals include: - - -- **Identifying CRAN and Bioconductor packages** lacking Windows/Arm64 support. -- **Proposing and testing patches upstream** for R packages that fail to build or run on WoA. -- **Engaging with the R development community** via the Windows Special interest group, [R-SIG-windows](https://stat.ethz.ch/mailman/listinfo/r-sig-windows), [R-Package-Devel](https://stat.ethz.ch/mailman/listinfo/r-package-devel) mailing list or the informal [R Contributors Slack](https://contributor.r-project.org/slack) and following the [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) to submit high-quality patches. -- **Reporting new issues**, requesting comments on proposed patches, documenting process via Bugzilla and reviewing existing issues through the [bug tracker](https://bugs.r-project.org/) -- **Tracking CI coverage** for recently announced public preview of [Windows11-Arm64 GitHub-hosted runners](https://github.blog/changelog/2025-04-14-windows-arm64-hosted-runners-now-available-in-public-preview/) and potentially proposing GitHub Actions or GitLab CI templates to automate WoA builds. - -Stretch Objectives: - -- **Identifying, Analyzing and fixing compatibility issues** in base R and Rtools for the Windows/Arm64 environment. This may involve waiting for improved upstream support from GCC for Windows-AArch64. A summary and progress is [available here](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/28802842658/MinGW+GNU+Toolchain). 
- -The deliverables include: - -- Patches, request for comments and bug reports for the highest impact packages -- A curated list of packages with proposed WoA support status -- A short technical write-up describing the contributions and challenges - -## Prerequisites - -- Intermediate understanding of the R language -- Intermediate understanding of Rtools, Git and Docker for cross-compilation. -- Basic understanding or willingness to learn Bugzilla, Windows 11 operating system and GitHub CI/CD. -- Arm64 Windows device or access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments). - -## Resources from Arm and our partners - -- Documentation: [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) -- Documentation: [R Bugzilla](https://bugs.r-project.org/) -- Documentation: [Rtools for Windows](https://cran.r-project.org/bin/windows/Rtools/) -- Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/) -- Documentation: Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program and the R community. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-real-time-image-classification.md b/docs/_posts/2025-05-30-real-time-image-classification.md deleted file mode 100644 index 1ae250eb..00000000 --- a/docs/_posts/2025-05-30-real-time-image-classification.md +++ /dev/null @@ -1,98 +0,0 @@ ---- -title: Running Real-Time Image Classification on Arm Cortex-M with CMSIS-NN -description: This self-service project trains, quantizes, and CMSIS-NN-deploys a CNN to achieve real-time image classification on an Arm Cortex-M board—demonstrating low-power, edge-ready AI on microcontrollers. -subjects: -- ML -- Performance and Architecture -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - This project aims to develop a real-time image classification system using Convolutional Neural Networks (CNN) on an Arm Cortex-M microcontroller with CMSIS-NN. The main deliverables include training a CNN model on a custom dataset, quantizing the model to deploy it on resource-constrained devices, and transforming the model into a C format to compile and run on the microcontroller. The project will provide practical experience in running AI models on edge devices and optimizing AI models for efficient performance. The final output will be a functional image classification system capable of real-time processing on a microcontroller. - - - ## Prequisites - - - Languages: Python, ML framework experience (TensorFlowLite for microcontroller or Pytorch / Executorch), Embedded programming in C. 
- - Tooling: - - TensorFlow Lite - - CMSIS-NN - - Keil MDK - - Hardware: - - Arm Cortex-M based microcontroller development board and compatible camera module. - - Access to hardware suitable for training neural networks - - ## Resources from Arm and our partners - - - Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) - - Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) - - Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) - - Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description -This project aims to develop a real-time image classification system using Convolutional Neural Networks (CNN) on an Arm Cortex-M microcontroller with CMSIS-NN. The main deliverables include training a CNN model on a custom dataset, quantizing the model to deploy it on resource-constrained devices, and transforming the model into a C format to compile and run on the microcontroller. 
The project will provide practical experience in running AI models on edge devices and optimizing AI models for efficient performance. The final output will be a functional image classification system capable of real-time processing on a microcontroller. - - -## Prequisites - -- Languages: Python, ML framework experience (TensorFlowLite for microcontroller or Pytorch / Executorch), Embedded programming in C. -- Tooling: - - TensorFlow Lite - - CMSIS-NN - - Keil MDK -- Hardware: - - Arm Cortex-M based microcontroller development board and compatible camera module. - - Access to hardware suitable for training neural networks - -## Resources from Arm and our partners - -- Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) -- Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) -- Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) -- Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-responsible-ai-and-yellow-teaming.md b/docs/_posts/2025-05-30-responsible-ai-and-yellow-teaming.md deleted file mode 100644 index f06944de..00000000 --- a/docs/_posts/2025-05-30-responsible-ai-and-yellow-teaming.md +++ /dev/null @@ -1,144 +0,0 @@ ---- -title: Responsible AI and Yellow Teaming -description: This self-service project equips teams with a YellowTeamGPT workflow that probes Arm-based AI products for unintended impacts—turning responsible-AI stress-testing into a core step of the development cycle. -subjects: -- ML -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - - **Why this is important?** - - AI products are becoming more capable by the day. But unless we think carefully about how we build AI systems, we risk amplifying harm as fast as we’re scaling performance. Even AI products that seem successful in narrow metrics like `number of users` and `engagement` can lead to a degradation of user trust in your company, reputational risk, and drive negative societal outcomes. A more systematic product development approach is necessary for this next generation of tech to ensure we get the benefits of AI without the downsides. - - **Project summary** - - This project introduces "Yellow Teaming", a structured methodology for stress-testing AI products by exploring the full spectrum of consequences when products succeed, not just fail. Students will create a YellowTeamGPT to analyze their Arm-based product concept and apply the learnings to make their product better. 
This exercise is an excellent way for software developers, product managers, and designers to elevate their products through thoughtful design choices above a crowded competitive landscape. - - The assistant will be integrated into your product development workflow (e.g. product brainstorming, feature planning reviews, coding sessions) to aid software teams in surfacing unintended effects of new product features on your company, your users, and society. Participants can implement their YellowTeamGPT using any LLM, from a private Llama3.1-8B model on an AWS instance (tutorial linked below) to a public ChatGPT/Claude/other chatbot. Participants can also Yellow Team without an LLM by applying the methodology themselves and documenting their analysis. Analysis can be documented as product design documents, sprint retrospectives, Git-based code reviews...anything that shows the results of their thoughtful design practices. - - Key Objectives of Your Project - - Collect Real-World Applications: Gather detailed accounts of AI projects developed using Yellow Teaming principles on Arm-based systems. - - Showcase Responsible AI Practices: Highlight how developers anticipate and address potential societal and ethical impacts of their AI solutions. - - Promote Arm-Based AI Development: Demonstrate the capabilities and advantages of deploying AI applications on Arm architectures, such as AWS Graviton processors or smartphones. - - Yellow Teaming Implementation: A detailed account of how Yellow Teaming was applied, including the tools/prompts used to facilitate analysis, identification of unintended consequences, strategies to mitigate negative product impacts, and/or new net-positive features ideated. - - - ## Prequisites - - If deploying a private Llama model -> - - **Hardware**: - - Access to an Arm-based cloud instance, for example Arm-based Graviton4 processors. 
- - **Software**: - - PyTorch and Hugging Face account - - `torchchat` repo and dependencies - - Hugging Face CLI for LLM download - - Git, Python 3.10+, and various common build essentials (e.g., `make`, `g++`) - - **Skills**: - - Proficiency in Python and PyTorch - - [Hugging Face account](https://huggingface.co/) - - Understanding of LLMs and prompting techniques - - If using a public LLM -> - - **Hardware**: - - None needed - - **Software**: - - Access to a public LLM - - **Skills**: - - Understanding of LLMs and prompting techniques - - ## Resources from Arm and our partners - - - External Course: [Mitigating Harmful Consequences course module by the Center for Humane Technology](https://www.humanetech.com/course) - - Blog: [Build Responsible AI products with your own Yellow Teaming LLM](https://pytorch.org/blog/build-responsible-ai-products-with-your-own-yellow-teaming-llm/) - - Learning Paths: [AI Learning Paths](https://learn.arm.com/tag/ml) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -## Description - - -**Why this is important?** - -AI products are becoming more capable by the day. 
But unless we think carefully about how we build AI systems, we risk amplifying harm as fast as we’re scaling performance. Even AI products that seem successful in narrow metrics like `number of users` and `engagement` can lead to a degradation of user trust in your company, reputational risk, and drive negative societal outcomes. A more systematic product development approach is necessary for this next generation of tech to ensure we get the benefits of AI without the downsides. - -**Project summary** - -This project introduces "Yellow Teaming", a structured methodology for stress-testing AI products by exploring the full spectrum of consequences when products succeed, not just fail. Students will create a YellowTeamGPT to analyze their Arm-based product concept and apply the learnings to make their product better. This exercise is an excellent way for software developers, product managers, and designers to elevate their products through thoughtful design choices above a crowded competitive landscape. - -The assistant will be integrated into your product development workflow (e.g. product brainstorming, feature planning reviews, coding sessions) to aid software teams in surfacing unintended effects of new product features on your company, your users, and society. Participants can implement their YellowTeamGPT using any LLM, from a private Llama3.1-8B model on an AWS instance (tutorial linked below) to a public ChatGPT/Claude/other chatbot. Participants can also Yellow Team without an LLM by applying the methodology themselves and documenting their analysis. Analysis can be documented as product design documents, sprint retrospectives, Git-based code reviews...anything that shows the results of their thoughtful design practices. - -Key Objectives of Your Project -- Collect Real-World Applications: Gather detailed accounts of AI projects developed using Yellow Teaming principles on Arm-based systems. 
-- Showcase Responsible AI Practices: Highlight how developers anticipate and address potential societal and ethical impacts of their AI solutions. -- Promote Arm-Based AI Development: Demonstrate the capabilities and advantages of deploying AI applications on Arm architectures, such as AWS Graviton processors or smartphones. -- Yellow Teaming Implementation: A detailed account of how Yellow Teaming was applied, including the tools/prompts used to facilitate analysis, identification of unintended consequences, strategies to mitigate negative product impacts, and/or new net-positive features ideated. - - -## Prequisites - -If deploying a private Llama model -> -- **Hardware**: - - Access to an Arm-based cloud instance, for example Arm-based Graviton4 processors. -- **Software**: - - PyTorch and Hugging Face account - - `torchchat` repo and dependencies - - Hugging Face CLI for LLM download - - Git, Python 3.10+, and various common build essentials (e.g., `make`, `g++`) -- **Skills**: - - Proficiency in Python and PyTorch - - [Hugging Face account](https://huggingface.co/) - - Understanding of LLMs and prompting techniques - -If using a public LLM -> -- **Hardware**: - - None needed -- **Software**: - - Access to a public LLM -- **Skills**: - - Understanding of LLMs and prompting techniques - -## Resources from Arm and our partners - -- External Course: [Mitigating Harmful Consequences course module by the Center for Humane Technology](https://www.humanetech.com/course) -- Blog: [Build Responsible AI products with your own Yellow Teaming LLM](https://pytorch.org/blog/build-responsible-ai-products-with-your-own-yellow-teaming-llm/) -- Learning Paths: [AI Learning Paths](https://learn.arm.com/tag/ml) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-sentiment-analysis-dashboard.md b/docs/_posts/2025-05-30-sentiment-analysis-dashboard.md deleted file mode 100644 index 5b744374..00000000 --- a/docs/_posts/2025-05-30-sentiment-analysis-dashboard.md +++ /dev/null @@ -1,85 +0,0 @@ ---- -title: Create a Sentiment Analysis Dashboard for Keywords Based on the Semiconductor Industry -description: This self-service project builds a web-scraping, LLM-powered dashboard that tracks and visualizes sentiment trends across semiconductor-industry news, giving stakeholders a real-time pulse on market mood and emerging themes. -subjects: -- ML -- Web -- Databases -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - This project aims to develop a sentiment analysis dashboard for keywords related to the semiconductor industry. The main deliverable is a web scraping script that gathers text data from various semiconductor news sites. 
Example sites are [SemiconductorEngineering.com](https://semiengineering.com/), [IEEE Spectrum](https://spectrum.ieee.org/), [EETimes](https://www.eetimes.com/tag/semiconductors/), [SemiconductorDigest](https://www.semiconductor-digest.com/), [SemiconductorToday](https://semiconductor-today.com/), [Financial Times - Semiconductors](https://www.ft.com/semiconductors), [Benzinga Semiconductors](https://www.benzinga.com/topic/semiconductors). - - This data will then be processed through a sentiment analysis LLM (Large Language Model) to determine the sentiment of the content and how it varies over time. The project will provide practical experience in web scraping, data processing, databases and using LLMs for sentiment analysis. The final output will be a functional dashboard that displays the sentiment analysis results in an easy-to-understand format. - - ## Prerequisites - - - Languages: Intermediate understanding of Python - - Hardware: Access to a computer with internet connectivity and access to cloud instances - - ## Resources from Arm and our partners - - You are free to choose your own implementation details. The resources below are examples to get started. - - - External Documentation: [BeautifulSoup](https://pypi.org/project/beautifulsoup4/) - - Learning Paths: [Deploy MariaDB](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/) - - - ## Support Level - - This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -## Description -This project aims to develop a sentiment analysis dashboard for keywords related to the semiconductor industry. The main deliverable is a web scraping script that gathers text data from various semiconductor news sites. Example sites are [SemiconductorEngineering.com](https://semiengineering.com/), [IEEE Spectrum](https://spectrum.ieee.org/), [EETimes](https://www.eetimes.com/tag/semiconductors/), [SemiconductorDigest](https://www.semiconductor-digest.com/), [SemiconductorToday](https://semiconductor-today.com/), [Financial Times - Semiconductors](https://www.ft.com/semiconductors), [Bezinga Semiconductors](https://www.benzinga.com/topic/semiconductors). - -This data will then be processed through a sentiment analysis LLM (Large Language Model) to determine the sentiment of the content and how it varies over time. The project will provide practical experience in web scraping, data processing, databases and using LLMs for sentiment analysis. The final output will be a functional dashboard that displays the sentiment analysis results in an easy-to-understand format. - -## Prequisites - -- Languages: Intermediate understanding of Python -- Hardware: Access to a computer with internet connectivity and access to cloud instances - -## Resources from Arm and our partners - -You are free to choose your own implementation details. The resouces below are examples to get started. - -- External Documentation: [BeautifulSoup](https://pypi.org/project/beautifulsoup4/) -- Learning Paths: [Deploy MariaDB](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/) - - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-smart-voice-assistant.md b/docs/_posts/2025-05-30-smart-voice-assistant.md deleted file mode 100644 index 2a3afb0c..00000000 --- a/docs/_posts/2025-05-30-smart-voice-assistant.md +++ /dev/null @@ -1,93 +0,0 @@ ---- -title: Smart Voice Assistant Using TinyML on Cortex-M55 -description: This project trains and deploys a TinyML keyword-spotting model on an Arm Cortex-M55/U55 board to create a low-power voice assistant that recognizes spoken commands and quantifies its accuracy, latency, and energy use. -subjects: -- ML -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - This project aims to develop a simple voice assistant that can recognize spoken commands such as “turn on the light,” “play music,” and other similar tasks. The voice assistant will be able to control peripheral devices accordingly. The main objective is to implement your design on a low-power microcontroller Cortex-M55/U55 to create low-level machine learning applications. You should look to access metrics such as the accuracy, power and computation time. 
Please refer to our [Machine Learning keyword spotting example](https://github.com/Arm-Examples/mlek-cmsis-pack-examples) as a reference. - - The deliverables include a functional voice assistant capable of understanding and executing basic commands, along with documentation detailing the development process and the performance of the system. - - ## Prerequisites - - - Languages: Python, C++, Embedded C - - Tooling: TensorFlow Lite for Microcontrollers, Keil MDK - - Hardware: Cortex-M55/U55 development board (or Corstone Virtual Platform), microphone, peripheral devices (e.g., lights, speakers) - - - ## Resources from Arm and our partners - - - Learning paths: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) - - Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) - - Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) - - Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) - - Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) - - ## Support Level - - This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description -This project aims to develop a simple voice assistant that can recognize spoken commands such as “turn on the light,” “play music,” and other similar tasks. The voice assistant will be able to control peripheral devices accordingly. The main objective is to implement your design on a low-power microcontroller Cortex-M55/U55 to create low-level machine learning applications. You should look to access metrics such as the accuracy, power and computation time. Please refer our [Machine Learning keyword spotting example](https://github.com/Arm-Examples/mlek-cmsis-pack-examples) as a reference. - -The deliverables include a functional voice assistant capable of understanding and executing basic commands, along with documentation detailing the development process and the performance of the system. - -## Prequisites - -- Languages: Python, C++, Embedded C -- Tooling: TensorFlow Lite for Microcontrollers, Keil MDK -- Hardware: Cortex-M55/U55 development board (or Corstone Virtual Platform), microphone, peripheral devices (e.g., lights, speakers) - - -## Resources from Arm and our partners - -- Learning paths: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) -- Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) -- Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) -- Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) -- Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm 
Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-specint2017-benchmarking-on-arm64.md b/docs/_posts/2025-05-30-specint2017-benchmarking-on-arm64.md deleted file mode 100644 index 9ef43f0b..00000000 --- a/docs/_posts/2025-05-30-specint2017-benchmarking-on-arm64.md +++ /dev/null @@ -1,126 +0,0 @@ ---- -title: 'SpecINT2017 Benchmarking on Arm64: Evaluating Compiler and Workload Performance' -description: This self-service project profiles SPEC CPU2017 on Arm64 servers—using GCC, Clang, and Arm Compiler with top-down analysis—to reveal how compiler choices and Arm micro-architectural features impact execution time, energy efficiency, and performance bottlenecks. -subjects: -- Performance and Architecture -- Migration to Arm -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ### Description - - **Why this is important?** - - SPEC is an industry standard for assessing the performance of both single-threaded and multi-threaded applications across various data types and compilers. 
This synthetic workload accurately represents real-world tasks and serves as a common metric for evaluating platform choices. Therefore, it is essential to comprehend the inner workings of these benchmarks to identify which microarchitectural features can enhance end-user applications. - - **Project Summary** - - This project aims to replicate the characterisation study from "SPEC CPU2017: Performance, Event, and Energy Characterization on the Core i7-8700K" on an Arm64 platform (e.g., Ampere Altra, AWS Graviton) using different compilers and performance profiling tools. The study will analyze how compiler optimizations and architectural features affect execution time, energy efficiency, and instruction throughput on Arm-based server processors. Deliverables include a comprehensive performance analysis report, reproducible benchmarking scripts, and a dataset comparing performance across different configurations. The report should locate microarchitectural bottlenecks using the [top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology), assess compiler performance, and provide recommendations on how to improve performance. - - ## Prerequisites - - Hardware: Access to Arm64-based server (Ampere Altra, AWS Graviton, Raspberry Pi for preliminary tests) - - Software: Familiarity with performance engineering and OOP in a language such as C++. 
- - Compilers: GCC, LLVM/Clang, Arm Compiler for Linux - - Profiling Tools: perf, Arm Performance Libraries - - Workloads: SPEC CPU2017 (academic license required), custom workloads - - ## Resources from Arm and our partners - - - Research Article: [Characterisation Paper on x86](https://research.spec.org/icpe_proceedings/2019/proceedings/p111.pdf) - - - Whitepaper: [Arm Top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology) - - - Install Guide:[Install Perf for Linux on Arm](https://learn.arm.com/install-guides/perf/) - - - Documentation: [Arm Performance Counters](https://developer.arm.com/documentation/ddi0379/a/Introduction/Performance-counters) - - - Documentation: [SPEC CPU2017 ](https://www.spec.org/cpu2017/results/) - - - Documentation: [GNU compilers](https://gcc.gnu.org/) - - - Software Download: [Arm compiler for Linux](https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -### Description - -**Why this is important?** - -SPEC is an industry standard for assessing the performance of both single-threaded and multi-threaded applications across various data types and compilers. 
This synthetic workload accurately represents real-world tasks and serves as a common metric for evaluating platform choices. Therefore, it is essential to comprehend the inner workings of these benchmarks to identify which microarchitectural features can enhance end-user applications. - -**Project Summary** - -This project aims to replicate the characterisation study from "SPEC CPU2017: Performance, Event, and Energy Characterization on the Core i7-8700K" on an Arm64 platform (e.g., Ampere Altra, AWS Graviton) using different compilers and performance profiling tools. The study will analyze how compiler optimizations and architectural features affect execution time, energy efficiency, and instruction throughput on Arm-based server processors. Deliverables include a comprehensive performance analysis report, reproducible benchmarking scripts, and a dataset comparing performance across different configurations. The report should locate microarchitectural bottlenecks using the [top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology), compiler performance and recommendations on how to improve performance. - -## Prequisites - -Hardware: Access to Arm64-based server (Ampere Altra, AWS Graviton, Raspberry Pi for preliminary tests) - -Software: Familiarity with performance engineering and a OOP with a language such as C++. 
- -Compilers: GCC, LLVM/Clang, Arm Compiler for Linux - -Profiling Tools: perf, Arm Performance Libraries - -Workloads: SPEC CPU2017 (academic license required), custom workloads - -## Resources from Arm and our partners - -- Research Article: [Characterisation Paper on x86](https://research.spec.org/icpe_proceedings/2019/proceedings/p111.pdf) - -- Whitepaper: [Arm Top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology) - -- Install Guide:[Install Perf for Linux on Arm](https://learn.arm.com/install-guides/perf/) - -- Documentation: [Arm Performance Counters](https://developer.arm.com/documentation/ddi0379/a/Introduction/Performance-counters) - -- Documentation: [SPEC CPU2017 ](https://www.spec.org/cpu2017/results/) - -- Documentation: [GNU compilers](https://gcc.gnu.org/) - -- Software Download: [Arm compiler for Linux](https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-write-a-learning-path.md b/docs/_posts/2025-05-30-write-a-learning-path.md deleted file mode 100644 index 0bde8c2c..00000000 --- a/docs/_posts/2025-05-30-write-a-learning-path.md +++ /dev/null @@ -1,80 +0,0 @@ ---- -title: Write an Educational Tutorial (Learning Path) of your Choice -description: This project lets students turn their Arm expertise into a publish-ready Learning Path—creating a structured, hands-on tutorial that guides others through a complete, hardware-friendly build and showcases the author’s teaching skills. -subjects: -- Libraries -- Web -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- Automotive -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - This exciting opportunity invites students to create and submit a comprehensive learning path focused on any aspect of Arm technology. Participants will have the chance to showcase their expertise, contribute to the academic community, and potentially publish their work for a wider audience. Learning paths are structured tutorials that guide learners through a series of topics and skills. For example, a project could involve developing a learning path on Arm-based embedded systems, culminating in a practical demonstration of a working prototype, such as a simple temperature monitoring system using a basic microcontroller and a few sensors. This project can be easily recreated with minimal hardware and software access. We look forward to seeing your innovative and insightful submissions! 
- - ## Prequisites - - - Computer with Internet Connectivity - - ## Resources from Arm and our partners - - - Documentation: [How to create a learning path](https://learn.arm.com/learning-paths/cross-platform/_example-learning-path/) - - Documentation: (https://github.com/ArmDeveloperEcosystem/arm-learning-paths/discussions/categories/ideas-for-new-learning-paths) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description -This exciting opportunity invites students to create and submit a comprehensive learning path focused on any aspect of Arm technology. Participants will have the chance to showcase their expertise, contribute to the academic community, and potentially publish their work for a wider audience. Learning paths are structured tutorials that guide learners through a series of topics and skills. For example, a project could involve developing a learning path on Arm-based embedded systems, culminating in a practical demonstration of a working prototype, such as a simple temperature monitoring system using a basic microcontroller and a few sensors. This project can be easily recreated with minimal hardware and software access. We look forward to seeing your innovative and insightful submissions! 
- -## Prequisites - -- Computer with Internet Connectivity - -## Resources from Arm and our partners - -- Documentation: [How to create a learning path](https://learn.arm.com/learning-paths/cross-platform/_example-learning-path/) -- Documentation: (https://github.com/ArmDeveloperEcosystem/arm-learning-paths/discussions/categories/ideas-for-new-learning-paths) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md b/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md deleted file mode 100644 index e9988d3a..00000000 --- a/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md +++ /dev/null @@ -1,103 +0,0 @@ ---- -title: C-Based-Application-from-Scratch -description: This self-service project goes back to the fundamentals. The challenge is to develop an application of your choice but your are only permitted to use the C language with as few dependencies as possible. 
-subjects: -- Performance and Architecture -- Libraries -requires-team: -- No -platform: -- IoT -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-07-11 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - **Why this is important?** - - Modern, higher-level managed languages such as Java and Python enabling developers to stand up complex applications quickly. However, this often comes at the expense of configurability and performance. Developing low-level skills are valuable to unlocking performance but also are crucial to understanding the principle mechanisms in computer architecture and how to write programs that leverage available hardware features. At Arm, we have a history of developing low-level software components, such as compilers, as well as contributing to software projects, such as the Linux Kernel. The demand for developers with these skills is high. - - **Project Summary** - - This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files. Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count. - - Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This is excluding those provided by the C standard library. You are also free to use any suitable compiler. If you use a difference language or a dependency written in another language, please include a short justification in your submission. - - Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions. 
- - - ## Prerequisites - - - Access to a Raspberry Pi device (any generation) - - Intermediate Understanding of the C language - - - ## Resources from Arm and our partners - - - External Resource: [Getting started with your Raspberry Pi](https://www.raspberrypi.com/documentation/computers/getting-started.html). - - External Resource: [C language documentation](https://en.cppreference.com/w/c/language.html) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. - - ### Previous Submissions - 1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git). - 2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game). ---- -## Description - -**Why this is important?** - -Modern, higher-level managed languages such as Java and Python enabling developers to stand up complex applications quickly. However, this often comes at the expense of configurability and performance. Developing low-level skills are valuable to unlocking performance but also are crucial to understanding the principle mechanisms in computer architecture and how to write programs that leverage available hardware features. 
At Arm, we have a history of developing low-level software components, such as compilers, as well as contributing to software projects, such as the Linux Kernel. The demand for developers with these skills is high. - -**Project Summary** - -This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files. Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count. - -Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This is excluding those provided by the C standard library. You are also free to use any suitable compiler. If you use a difference language or a dependency written in another language, please include a short justification in your submission. - -Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions. - - -## Prerequisites - -- Access to a Raspberry Pi device (any generation) -- Intermediate Understanding of the C language - - -## Resources from Arm and our partners - -- External Resource: [Getting started with your Raspberry Pi](https://www.raspberrypi.com/documentation/computers/getting-started.html). -- External Resource: [C language documentation](https://en.cppreference.com/w/c/language.html) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. - -### Previous Submissions -1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git). -2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game). \ No newline at end of file diff --git a/docs/_posts/2025-07-11-c-based-application-from-scratch.md b/docs/_posts/2025-07-11-c-based-application-from-scratch.md deleted file mode 100644 index 675a8e33..00000000 --- a/docs/_posts/2025-07-11-c-based-application-from-scratch.md +++ /dev/null @@ -1,103 +0,0 @@ ---- -title: Create a minimal C-Based Project for Raspberry Pi -description: This self-service project goes back to the fundamentals. The challenge is to develop an application of your choice but you are only permitted to use the C language with as few dependencies as possible. -subjects: -- Performance and Architecture -- Libraries -requires-team: -- No -platform: -- IoT -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-07-11 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - **Why is this important?** - - Modern, higher-level managed languages such as Java and Python enable developers to stand up complex applications quickly. However, this often comes at the expense of configurability and performance.
Developing low-level skills are valuable to unlocking performance but also are crucial to understanding the principle mechanisms in computer architecture and how to write programs that leverage available hardware features. At Arm, we have a history of developing low-level software components, such as compilers, as well as contributing to software projects, such as the Linux Kernel. The demand for developers with these skills is high. - - **Project Summary** - - This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files. Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count. - - Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This is excluding those provided by the C standard library. You are also free to use any suitable compiler. If you use a difference language or a dependency written in another language, please include a short justification in your submission. - - Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions. - - - ## Prerequisites - - - Access to a Raspberry Pi device (any generation) - - Intermediate Understanding of the C language - - - ## Resources from Arm and our partners - - - External Resource: [Getting started with your Raspberry Pi](https://www.raspberrypi.com/documentation/computers/getting-started.html). - - External Resource: [C language documentation](https://en.cppreference.com/w/c/language.html) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. - - ### Previous Submissions - 1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git). - 2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game). ---- -## Description - -**Why this is important?** - -Modern, higher-level managed languages such as Java and Python enabling developers to stand up complex applications quickly. However, this often comes at the expense of configurability and performance. Developing low-level skills are valuable to unlocking performance but also are crucial to understanding the principle mechanisms in computer architecture and how to write programs that leverage available hardware features. At Arm, we have a history of developing low-level software components, such as compilers, as well as contributing to software projects, such as the Linux Kernel. The demand for developers with these skills is high. - -**Project Summary** - -This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files. Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count. 
- -Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This is excluding those provided by the C standard library. You are also free to use any suitable compiler. If you use a different language or a dependency written in another language, please include a short justification in your submission. - -Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions. - - -## Prerequisites - -- Access to a Raspberry Pi device (any generation) -- Intermediate Understanding of the C language - - -## Resources from Arm and our partners - -- External Resource: [Getting started with your Raspberry Pi](https://www.raspberrypi.com/documentation/computers/getting-started.html). -- External Resource: [C language documentation](https://en.cppreference.com/w/c/language.html) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. - -### Previous Submissions -1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git). -2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game).
\ No newline at end of file diff --git a/docs/_posts/2025-08-28-NPC-LLM-Runtime.md b/docs/_posts/2025-08-28-NPC-LLM-Runtime.md deleted file mode 100644 index b2124229..00000000 --- a/docs/_posts/2025-08-28-NPC-LLM-Runtime.md +++ /dev/null @@ -1,122 +0,0 @@ ---- -title: NPC-LLM-Runtime -description: This self-service project explores novel ways of integrating Large Language Models (LLMs) into real-time gameplay to drive dynamic Non-Playable Character (NPC) interactions. -subjects: -- ML -- Gaming -- Graphics -requires-team: -- No -platform: -- AI -- Mobile, Graphics, and Gaming -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-08-28 -license: -status: -- Published -badges: trending -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - **Why is this important?** - - Large Language Models (LLMs) are redefining the possibilities for interactive game experiences, especially in how players engage with Non-Playable Characters (NPCs). Traditional NPCs rely on pre-scripted dialogue trees and behavior systems, which limit immersion and adaptability. By leveraging LLMs locally, we can move beyond static interactions and create dynamic, real-time NPC behaviors that respond intelligently to players. This not only enhances immersion but also demonstrates the feasibility of running advanced AI directly on-device without reliance on cloud infrastructure. Such innovation could shape the future of gaming, particularly in mobile platforms where performance and autonomy are critical. - - **Project Summary** - - This project challenges you to propose and implement novel methods of using LLMs to control or interact with NPCs during runtime. Beyond dialogue systems, the focus is on new, creative mechanisms that showcase the potential of LLM-driven NPCs in real-time gameplay. 
Your implementation should demonstrate a working prototype running locally at a stable 30–60 FPS, ensuring both responsiveness and playability. While achieving mobile deployment is the ultimate goal, a PC/laptop demo will also be accepted as proof of concept. The final submission should include source code, reproducible build instructions, supporting documentation, and a short demo video. - - To qualify, your submission should include, where possible: - - - Source code (with clear documentation and build instructions) - - A reproducible setup (e.g. scripts, datasets, or dependencies) - - A supporting document describing the project and design decisions - - High-quality images and a video (≤ 3 minutes) demonstrating the demo in action - - Please ensure your contribution contains no confidential material and that you have rights to share it, especially if affiliated with an academic or commercial institution. - - ## Prequisites - - Familiarity with a modern game engine (e.g., Unity, Unreal Engine, Godot) - - Experience with integrating machine learning models into real-time applications - - Knowledge of C++, Python, or a game scripting language - - Understanding of on-device ML optimization (e.g., quantization, pruning, mobile deployment) - - Access to hardware capable of running LLM inference locally (PC or mobile) - - - ## Resources from Arm and our partners - - - Blog: [The Future of Interaction with Mobile Game Characters](https://dl.acm.org/doi/10.1145/3641234.3671019) - - Blog: [The Future is Here: Verbal Interaction with NPCs on Mobile](https://doi.org/10.1145/3664294.3664365) - - Blog: [Google AI for game developers- Google Developers Blog](https://developers.googleblog.com/en/google-ai-for-game-developers/) - - Webinar: [Generative AI in Game Development](https://meet95924766.adobeconnect.com/pkevz158x6tp/) - - Paper: [A Survey on Large Language Model-Based Game Agents ](https://arxiv.org/abs/2404.02039) - - Paper: [Large Language Models and Games: A Survey 
and Roadmap](https://arxiv.org/abs/2402.18659) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- -## Description - -**Why is this important?** - -Large Language Models (LLMs) are redefining the possibilities for interactive game experiences, especially in how players engage with Non-Playable Characters (NPCs). Traditional NPCs rely on pre-scripted dialogue trees and behavior systems, which limit immersion and adaptability. By leveraging LLMs locally, we can move beyond static interactions and create dynamic, real-time NPC behaviors that respond intelligently to players. This not only enhances immersion but also demonstrates the feasibility of running advanced AI directly on-device without reliance on cloud infrastructure. Such innovation could shape the future of gaming, particularly in mobile platforms where performance and autonomy are critical. - -**Project Summary** - -This project challenges you to propose and implement novel methods of using LLMs to control or interact with NPCs during runtime. Beyond dialogue systems, the focus is on new, creative mechanisms that showcase the potential of LLM-driven NPCs in real-time gameplay. 
Your implementation should demonstrate a working prototype running locally at a stable 30–60 FPS, ensuring both responsiveness and playability. While achieving mobile deployment is the ultimate goal, a PC/laptop demo will also be accepted as proof of concept. The final submission should include source code, reproducible build instructions, supporting documentation, and a short demo video. - -To qualify, your submission should include, where possible: - -- Source code (with clear documentation and build instructions) -- A reproducible setup (e.g. scripts, datasets, or dependencies) -- A supporting document describing the project and design decisions -- High-quality images and a video (≤ 3 minutes) demonstrating the demo in action - -Please ensure your contribution contains no confidential material and that you have rights to share it, especially if affiliated with an academic or commercial institution. - -## Prequisites -- Familiarity with a modern game engine (e.g., Unity, Unreal Engine, Godot) -- Experience with integrating machine learning models into real-time applications -- Knowledge of C++, Python, or a game scripting language -- Understanding of on-device ML optimization (e.g., quantization, pruning, mobile deployment) -- Access to hardware capable of running LLM inference locally (PC or mobile) - - -## Resources from Arm and our partners - -- Blog: [The Future of Interaction with Mobile Game Characters](https://dl.acm.org/doi/10.1145/3641234.3671019) -- Blog: [The Future is Here: Verbal Interaction with NPCs on Mobile](https://doi.org/10.1145/3664294.3664365) -- Blog: [Google AI for game developers- Google Developers Blog](https://developers.googleblog.com/en/google-ai-for-game-developers/) -- Webinar: [Generative AI in Game Development](https://meet95924766.adobeconnect.com/pkevz158x6tp/) -- Paper: [A Survey on Large Language Model-Based Game Agents ](https://arxiv.org/abs/2404.02039) -- Paper: [Large Language Models and Games: A Survey and 
Roadmap](https://arxiv.org/abs/2402.18659) - - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-08-28-npc-llm-runtime.md b/docs/_posts/2025-08-28-npc-llm-runtime.md deleted file mode 100644 index b3090b32..00000000 --- a/docs/_posts/2025-08-28-npc-llm-runtime.md +++ /dev/null @@ -1,122 +0,0 @@ ---- -title: On-Device LLMs for Real-Time NPC Interaction in Games -description: This self-service project explores novel ways of integrating Large Language Models (LLMs) into real-time gameplay to drive dynamic Non-Playable Character (NPC) interactions. -subjects: -- ML -- Gaming -- Graphics -requires-team: -- No -platform: -- AI -- Mobile, Graphics, and Gaming -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-08-28 -license: -status: -- Published -badges: trending -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - **Why is this important?** - - Large Language Models (LLMs) are redefining the possibilities for interactive game experiences, especially in how players engage with Non-Playable Characters (NPCs). 
Traditional NPCs rely on pre-scripted dialogue trees and behavior systems, which limit immersion and adaptability. By leveraging LLMs locally, we can move beyond static interactions and create dynamic, real-time NPC behaviors that respond intelligently to players. This not only enhances immersion but also demonstrates the feasibility of running advanced AI directly on-device without reliance on cloud infrastructure. Such innovation could shape the future of gaming, particularly in mobile platforms where performance and autonomy are critical. - - **Project Summary** - - This project challenges you to propose and implement novel methods of using LLMs to control or interact with NPCs during runtime. Beyond dialogue systems, the focus is on new, creative mechanisms that showcase the potential of LLM-driven NPCs in real-time gameplay. Your implementation should demonstrate a working prototype running locally at a stable 30–60 FPS, ensuring both responsiveness and playability. While achieving mobile deployment is the ultimate goal, a PC/laptop demo will also be accepted as proof of concept. The final submission should include source code, reproducible build instructions, supporting documentation, and a short demo video. - - To qualify, your submission should include, where possible: - - - Source code (with clear documentation and build instructions) - - A reproducible setup (e.g. scripts, datasets, or dependencies) - - A supporting document describing the project and design decisions - - High-quality images and a video (≤ 3 minutes) demonstrating the demo in action - - Please ensure your contribution contains no confidential material and that you have rights to share it, especially if affiliated with an academic or commercial institution. 
- - ## Prequisites - - Familiarity with a modern game engine (e.g., Unity, Unreal Engine, Godot) - - Experience with integrating machine learning models into real-time applications - - Knowledge of C++, Python, or a game scripting language - - Understanding of on-device ML optimization (e.g., quantization, pruning, mobile deployment) - - Access to hardware capable of running LLM inference locally (PC or mobile) - - - ## Resources from Arm and our partners - - - Blog: [The Future of Interaction with Mobile Game Characters](https://dl.acm.org/doi/10.1145/3641234.3671019) - - Blog: [The Future is Here: Verbal Interaction with NPCs on Mobile](https://doi.org/10.1145/3664294.3664365) - - Blog: [Google AI for game developers- Google Developers Blog](https://developers.googleblog.com/en/google-ai-for-game-developers/) - - Webinar: [Generative AI in Game Development](https://meet95924766.adobeconnect.com/pkevz158x6tp/) - - Paper: [A Survey on Large Language Model-Based Game Agents ](https://arxiv.org/abs/2404.02039) - - Paper: [Large Language Models and Games: A Survey and Roadmap](https://arxiv.org/abs/2402.18659) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---- -## Description - -**Why is this important?** - -Large Language Models (LLMs) are redefining the possibilities for interactive game experiences, especially in how players engage with Non-Playable Characters (NPCs). Traditional NPCs rely on pre-scripted dialogue trees and behavior systems, which limit immersion and adaptability. By leveraging LLMs locally, we can move beyond static interactions and create dynamic, real-time NPC behaviors that respond intelligently to players. This not only enhances immersion but also demonstrates the feasibility of running advanced AI directly on-device without reliance on cloud infrastructure. Such innovation could shape the future of gaming, particularly in mobile platforms where performance and autonomy are critical. - -**Project Summary** - -This project challenges you to propose and implement novel methods of using LLMs to control or interact with NPCs during runtime. Beyond dialogue systems, the focus is on new, creative mechanisms that showcase the potential of LLM-driven NPCs in real-time gameplay. Your implementation should demonstrate a working prototype running locally at a stable 30–60 FPS, ensuring both responsiveness and playability. While achieving mobile deployment is the ultimate goal, a PC/laptop demo will also be accepted as proof of concept. The final submission should include source code, reproducible build instructions, supporting documentation, and a short demo video. - -To qualify, your submission should include, where possible: - -- Source code (with clear documentation and build instructions) -- A reproducible setup (e.g. scripts, datasets, or dependencies) -- A supporting document describing the project and design decisions -- High-quality images and a video (≤ 3 minutes) demonstrating the demo in action - -Please ensure your contribution contains no confidential material and that you have rights to share it, especially if affiliated with an academic or commercial institution. 
- -## Prerequisites -- Familiarity with a modern game engine (e.g., Unity, Unreal Engine, Godot) -- Experience with integrating machine learning models into real-time applications -- Knowledge of C++, Python, or a game scripting language -- Understanding of on-device ML optimization (e.g., quantization, pruning, mobile deployment) -- Access to hardware capable of running LLM inference locally (PC or mobile) - - -## Resources from Arm and our partners - -- Blog: [The Future of Interaction with Mobile Game Characters](https://dl.acm.org/doi/10.1145/3641234.3671019) -- Blog: [The Future is Here: Verbal Interaction with NPCs on Mobile](https://doi.org/10.1145/3664294.3664365) -- Blog: [Google AI for game developers- Google Developers Blog](https://developers.googleblog.com/en/google-ai-for-game-developers/) -- Webinar: [Generative AI in Game Development](https://meet95924766.adobeconnect.com/pkevz158x6tp/) -- Paper: [A Survey on Large Language Model-Based Game Agents ](https://arxiv.org/abs/2404.02039) -- Paper: [Large Language Models and Games: A Survey and Roadmap](https://arxiv.org/abs/2402.18659) - - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file diff --git a/docs/_posts/2025-11-03-Python-Porting-Challenge.md b/docs/_posts/2025-11-03-Python-Porting-Challenge.md deleted file mode 100644 index 238998f6..00000000 --- a/docs/_posts/2025-11-03-Python-Porting-Challenge.md +++ /dev/null @@ -1,118 +0,0 @@ ---- -title: Python-Porting-Challenge -description: This challenge focuses on enabling Python support for Windows on Arm (WoA) to improve developer experience. While Python is widely used in research and industry, many popular packages—such as Pandas—still lack pre-built WoA binaries (win_arm64 wheels). The goal is to validate and optimise third-party packages, fix compatibility issues, and collaborate with maintainers to upstream WoA support. -subjects: -- Libraries -requires-team: -- No -platform: -- Laptops and Desktops -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-03 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility* - - ## Description - - Windows on Arm brings the power of Arm architecture to Windows users, delivering energy efficiency and performance for a wide range of devices. As adoption grows, ensuring a seamless developer experience is critical, especially for Python, one of the most widely used languages in research, education, and industry. - - This challenge is on advancing the Arm ecosystem by addressing critical gaps in software enablement. A key objective is enabling Windows on Arm (WoA) across the WoA Python package ecosystem, which involves validating and optimising 3rd party packages to ensure seamless functionality across diverse environments. 
- - Despite Python’s widespread adoption, many popular packages such as Pandas still lack pre-built Windows on Arm binaries (`win_arm64` wheels), creating a barrier for developers and researchers who rely on these tools for data analysis and scientific computing. If a Windows on Arm Python developer wanted to use Pandas, they would have to recompile from source, which required the correct toolchain and is not guaranteed to compile or run successfully. - - Key Objectives: - - - Python Ecosystem Enablement: Turn at least 5 amber projects green on [Windows Arm64 Wheels](https://tonybaloney.github.io/windows-arm64-wheels/). - - Identify packages that do not have readily available `win_arm64` wheels. Identify any bugs or regressions when porting to application (for example, `x86` instrinsics) and create a patch that resolves issues and enables the packages to correctly build and run performantly. - - Community Collaboration: Engage with global developer communities, such as Python package maintainers, to get WoA package support upstreamed and integrated. - - - ## Prequisites - - - Intermediate to advance understanding of the Python language - - Some experience on creating python packages and continuous integration testing. - - If you decide to tackle non pure-python packages that are written in other languages, you will need an intermediate understanding of the language the program was written in (e.g., Rust, Java, C++ etc.). 
- - ## Resources from Arm and our partners - - - External Documentation: [Methods to run Windows on Arm64](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) - - External Documentation: [Python Packages without Windows-on-Arm Wheels](https://tonybaloney.github.io/windows-arm64-wheels/) - - External Documentation: [Python Packages without Windows-on-Arm Wheels additional resource](http://www.winarm64wheels.com/) - - Learning Path: [Sampling CPython with WindowsPerf](https://learn.arm.com/learning-paths/laptops-and-desktops/windowsperf_sampling_cpython/) - - Community Post: [Python on Windows Arm64](https://discuss.python.org/t/python-on-windows-arm64/104524) - - External Documentation: [Status of Python versions](https://devguide.python.org/versions/) - - GitHub Repository: [Source code and documentation to build Python Wheels](https://github.com/pypa/cibuildwheel) - - - ## Support Level - - If you would like to request a small donation to help procure Windows on Arm hardware, please reach out to us at Arm-Developer-Lab@arm.com. - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---- - -### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility* - -## Description - -Windows on Arm brings the power of Arm architecture to Windows users, delivering energy efficiency and performance for a wide range of devices. As adoption grows, ensuring a seamless developer experience is critical, especially for Python, one of the most widely used languages in research, education, and industry. - -This challenge is on advancing the Arm ecosystem by addressing critical gaps in software enablement. A key objective is enabling Windows on Arm (WoA) across the WoA Python package ecosystem, which involves validating and optimising 3rd party packages to ensure seamless functionality across diverse environments. - -Despite Python’s widespread adoption, many popular packages such as Pandas still lack pre-built Windows on Arm binaries (`win_arm64` wheels), creating a barrier for developers and researchers who rely on these tools for data analysis and scientific computing. If a Windows on Arm Python developer wanted to use Pandas, they would have to recompile from source, which requires the correct toolchain and is not guaranteed to compile or run successfully. - -Key Objectives: - -- Python Ecosystem Enablement: Turn at least 5 amber projects green on [Windows Arm64 Wheels](https://tonybaloney.github.io/windows-arm64-wheels/). -- Identify packages that do not have readily available `win_arm64` wheels. Identify any bugs or regressions when porting to application (for example, `x86` intrinsics) and create a patch that resolves issues and enables the packages to correctly build and run performantly. -- Community Collaboration: Engage with global developer communities, such as Python package maintainers, to get WoA package support upstreamed and integrated.
- 

## Prerequisites

- Intermediate to advanced understanding of the Python language
- Some experience in creating Python packages and continuous integration testing.
- If you decide to tackle non pure-python packages that are written in other languages, you will need an intermediate understanding of the language the program was written in (e.g., Rust, Java, C++ etc.).

## Resources from Arm and our partners

- External Documentation: [Methods to run Windows on Arm64](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
- External Documentation: [Python Packages without Windows-on-Arm Wheels](https://tonybaloney.github.io/windows-arm64-wheels/)
- External Documentation: [Python Packages without Windows-on-Arm Wheels additional resource](http://www.winarm64wheels.com/)
- Learning Path: [Sampling CPython with WindowsPerf](https://learn.arm.com/learning-paths/laptops-and-desktops/windowsperf_sampling_cpython/)
- Community Post: [Python on Windows Arm64](https://discuss.python.org/t/python-on-windows-arm64/104524)
- External Documentation: [Status of Python versions](https://devguide.python.org/versions/)
- GitHub Repository: [Source code and documentation to build Python Wheels](https://github.com/pypa/cibuildwheel)


## Support Level

If you would like to request a small donation to help procure Windows on Arm hardware, please reach out to us at Arm-Developer-Lab@arm.com.

This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).

## Benefits

Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- -To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-03-python-porting-challenge.md b/docs/_posts/2025-11-03-python-porting-challenge.md deleted file mode 100644 index 4950ccd1..00000000 --- a/docs/_posts/2025-11-03-python-porting-challenge.md +++ /dev/null @@ -1,118 +0,0 @@ ---- -title: Python Package Porting Challenge -description: This challenge focuses on enabling Python support for Windows on Arm (WoA) to improve developer experience. While Python is widely used in research and industry, many popular packages—such as Pandas—still lack pre-built WoA binaries (win_arm64 wheels). The goal is to validate and optimise third-party packages, fix compatibility issues, and collaborate with maintainers to upstream WoA support. -subjects: -- Libraries -requires-team: -- No -platform: -- Laptops and Desktops -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-03 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility* - - ## Description - - Windows on Arm brings the power of Arm architecture to Windows users, delivering energy efficiency and performance for a wide range of devices. As adoption grows, ensuring a seamless developer experience is critical, especially for Python, one of the most widely used languages in research, education, and industry. 
- 
- This challenge focuses on advancing the Arm ecosystem by addressing critical gaps in software enablement. A key objective is enabling Windows on Arm (WoA) across the WoA Python package ecosystem, which involves validating and optimising 3rd party packages to ensure seamless functionality across diverse environments.
- 
- Despite Python’s widespread adoption, many popular packages such as Pandas still lack pre-built Windows on Arm binaries (`win_arm64` wheels), creating a barrier for developers and researchers who rely on these tools for data analysis and scientific computing. If a Windows on Arm Python developer wanted to use Pandas, they would have to recompile from source, which requires the correct toolchain and is not guaranteed to compile or run successfully.
- 
- Key Objectives:
- 
- - Python Ecosystem Enablement: Turn at least 5 amber projects green on [Windows Arm64 Wheels](https://tonybaloney.github.io/windows-arm64-wheels/).
- - Identify packages that do not have readily available `win_arm64` wheels. Identify any bugs or regressions when porting an application (for example, `x86` intrinsics) and create a patch that resolves issues and enables the packages to correctly build and run performantly.
- - Community Collaboration: Engage with global developer communities, such as Python package maintainers, to get WoA package support upstreamed and integrated.
- 
- 
- ## Prerequisites
- 
- - Intermediate to advanced understanding of the Python language
- - Some experience in creating Python packages and continuous integration testing.
- - If you decide to tackle non pure-python packages that are written in other languages, you will need an intermediate understanding of the language the program was written in (e.g., Rust, Java, C++ etc.). 
- - ## Resources from Arm and our partners - - - External Documentation: [Methods to run Windows on Arm64](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) - - External Documentation: [Python Packages without Windows-on-Arm Wheels](https://tonybaloney.github.io/windows-arm64-wheels/) - - External Documentation: [Python Packages without Windows-on-Arm Wheels additional resource](http://www.winarm64wheels.com/) - - Learning Path: [Sampling CPython with WindowsPerf](https://learn.arm.com/learning-paths/laptops-and-desktops/windowsperf_sampling_cpython/) - - Community Post: [Python on Windows Arm64](https://discuss.python.org/t/python-on-windows-arm64/104524) - - External Documentation: [Status of Python versions](https://devguide.python.org/versions/) - - GitHub Repository: [Source code and documentation to build Python Wheels](https://github.com/pypa/cibuildwheel) - - - ## Support Level - - If you would like to request a small donation to help procure Windows on Arm hardware, please reach out to us at Arm-Developer-Lab@arm.com. - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---


### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility*

## Description

Windows on Arm brings the power of Arm architecture to Windows users, delivering energy efficiency and performance for a wide range of devices. As adoption grows, ensuring a seamless developer experience is critical, especially for Python, one of the most widely used languages in research, education, and industry.

This challenge focuses on advancing the Arm ecosystem by addressing critical gaps in software enablement. A key objective is enabling Windows on Arm (WoA) across the WoA Python package ecosystem, which involves validating and optimising 3rd party packages to ensure seamless functionality across diverse environments.

Despite Python’s widespread adoption, many popular packages such as Pandas still lack pre-built Windows on Arm binaries (`win_arm64` wheels), creating a barrier for developers and researchers who rely on these tools for data analysis and scientific computing. If a Windows on Arm Python developer wanted to use Pandas, they would have to recompile from source, which requires the correct toolchain and is not guaranteed to compile or run successfully.

Key Objectives:

- Python Ecosystem Enablement: Turn at least 5 amber projects green on [Windows Arm64 Wheels](https://tonybaloney.github.io/windows-arm64-wheels/).
- Identify packages that do not have readily available `win_arm64` wheels. Identify any bugs or regressions when porting an application (for example, `x86` intrinsics) and create a patch that resolves issues and enables the packages to correctly build and run performantly.
- Community Collaboration: Engage with global developer communities, such as Python package maintainers, to get WoA package support upstreamed and integrated. 
- 

## Prerequisites

- Intermediate to advanced understanding of the Python language
- Some experience in creating Python packages and continuous integration testing.
- If you decide to tackle non pure-python packages that are written in other languages, you will need an intermediate understanding of the language the program was written in (e.g., Rust, Java, C++ etc.).

## Resources from Arm and our partners

- External Documentation: [Methods to run Windows on Arm64](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
- External Documentation: [Python Packages without Windows-on-Arm Wheels](https://tonybaloney.github.io/windows-arm64-wheels/)
- External Documentation: [Python Packages without Windows-on-Arm Wheels additional resource](http://www.winarm64wheels.com/)
- Learning Path: [Sampling CPython with WindowsPerf](https://learn.arm.com/learning-paths/laptops-and-desktops/windowsperf_sampling_cpython/)
- Community Post: [Python on Windows Arm64](https://discuss.python.org/t/python-on-windows-arm64/104524)
- External Documentation: [Status of Python versions](https://devguide.python.org/versions/)
- GitHub Repository: [Source code and documentation to build Python Wheels](https://github.com/pypa/cibuildwheel)


## Support Level

If you would like to request a small donation to help procure Windows on Arm hardware, please reach out to us at Arm-Developer-Lab@arm.com.

This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).

## Benefits

Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- -To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md b/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md deleted file mode 100644 index 27efe0e4..00000000 --- a/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md +++ /dev/null @@ -1,136 +0,0 @@ ---- -title: Always-On-AI-with-Ethos-U85-NPU -description: The vision of Edge AI compute is to embed low-power intelligent sensing, perception, and decision systems everywhere. A low-power always-on-AI island continuously monitors sensory inputs to detect triggers. When a trigger is detected, it wakes up a more capable processor to carry out high-value inference, interaction, or control tasks. -subjects: -- ML -- Performance and Architecture -- Embedded Linux -- RTOS Fundamentals -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why is this important?** - - The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. 
Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy: - - A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies. - - When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance. - - This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources. - - **Project Summary** - - Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements: - - 1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers. - 2. A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference. - 3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring). - - *Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. 
For LLM inference, consider connecting a Raspberry Pi 5 or similar.* - - Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices. - - You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved. - - Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve? - - ## What will you use? - You should either be familiar with, or willing to learn about, the following: - - Programming: Python, C++, Embedded C - - ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA. - - Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. Yocto) or Raspberry Pi OS - - - ## Resources from Arm and our partners - - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) - - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) - - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description - -**Why is this important?** - -The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy: -- A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies. -- When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance. - -This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources. - -**Project Summary** - -Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements: - -1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers. -2. 
A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference. -3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring). - -*Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. For LLM inference, consider connecting a Raspberry Pi 5 or similar.* - -Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices. - -You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved. - -Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve? - -## What will you use? -You should either be familiar with, or willing to learn about, the following: -- Programming: Python, C++, Embedded C -- ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA. -- Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. 
Yocto) or Raspberry Pi OS - - -## Resources from Arm and our partners -- Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) -- Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) -- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) -- Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) -- PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md b/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md deleted file mode 100644 index 458a8f1c..00000000 --- a/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md +++ /dev/null @@ -1,130 +0,0 @@ ---- -title: Edge-AI-On-Mobile -description: Leverage the latest SME2 (Scalable Matrix Extension 2) available on the newest vivo X300 smartphones (built on Arm Lumex CSS) for advanced image/video, audio and text processing edge AI. 
Explore how SME2, via KleidiAI, enables larger matrix workloads, higher throughput, and novel applications on device without cloud connectivity required. -subjects: -- ML -- Performance and Architecture -- Libraries -requires-team: -- No -platform: -- Mobile, Graphics, and Gaming -- AI -- IoT -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -badges: trending -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - ### Why is this important? - - SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device. - - [SME2](https://www.arm.com/technologies/sme2) - - The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases. - - [vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones) - - ### Project Summary - - Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2. 
- - Example project areas: - - Real-time video semantic segmentation (e.g., background removal + AR compositing) - - Live object detection + natural-language description (text summary of what the camera sees) - - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition - - On-device lightweight LLM or encoder-only transformer processing for mobile assistants - - Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case. - - Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2. 
- - --- - - ## Resources from Arm and our partners - - - Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile) - - Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/) - - Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/) - - Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/) - - Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm) - - --- - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
- - --- ---- -## Description - -### Why is this important? - -SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device. - -[SME2](https://www.arm.com/technologies/sme2) - -The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases. - -[vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones) - -### Project Summary - -Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2. - -Example project areas: - - Real-time video semantic segmentation (e.g., background removal + AR compositing) - - Live object detection + natural-language description (text summary of what the camera sees) - - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition - - On-device lightweight LLM or encoder-only transformer processing for mobile assistants - -Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. 
Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case. - -Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2. - ---- - -## Resources from Arm and our partners - -- Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile) -- Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/) -- Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/) -- Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/) -- Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/) -- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) -- Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm) - ---- - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. - ---- \ No newline at end of file diff --git a/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md b/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md deleted file mode 100644 index a620c800..00000000 --- a/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md +++ /dev/null @@ -1,213 +0,0 @@ ---- -title: Ethos-U85-NPU-Applications -description: Push the limits of Edge AI by deploying the heaviest inference applications possible on Ethos-U85. Students will explore transformer-based and TOSA-optimized workloads that demonstrate performance levels on the next-gen of Ethos NPUs. -subjects: -- ML -- Performance and Architecture -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -badges: trending -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Description - - **Why is this important?** - - The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices. - - This project challenges you to explore the boundaries of what’s possible on Ethos-U85. 
The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85. - - [Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85) - - **Project Summary** - - Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities. - - Your project should include: - - 1. Model Deployment and Optimization - Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using: - - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance. - - The Vela compiler for optimization. - - These tools can be used to: - - Convert and visualize model graphs in TOSA format. - - Identify unsupported operators. - - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting. - - Run Vela for optimized compilation targeting Ethos-U85. - - 2. Application Demonstration - Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include: - - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification). - - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks. - - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding. - - 3. Analysis and Benchmarking - Report quantitative results on: - - Inference latency, throughput (FPS or tokens/s), and memory footprint. - - Power efficiency under load (optional). 
- - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits). - - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion. - - --- - - ## What kind of projects should you target? - - To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria: - - - Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models. - - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms. - - High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput. - - *Example:* 512×512 semantic segmentation or multi-object detection. - - Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance. - - *Example:* large MLP heads or transformer token mixers. - - Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations. - - The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies. - - --- - - ## What will you use? - You should be familiar with, or willing to learn about: - - Programming: Python, C/C++ - - ExecuTorch or TensorFlow Lite (Micro/LiteRT) - - Techniques for optimising AI models for the edge (quantization, pruning, etc.) 
- - Optimization Tools: - - TOSA Model Explorer - - .tflite to .tosa converter (if using TensorFlow rather than ExecuTorch) - - Vela compiler for Ethos-U - - Bare-metal or RTOS (e.g., Zephyr) - - --- - - ## Resources from Arm and our partners - - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) - - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) - - Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model) - - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) - --- - - ## Support Level - - This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally, if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - -## Description - -**Why is this important?** - -The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems.
With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices. - -This project challenges you to explore the boundaries of what’s possible on Ethos-U85. The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85. - -[Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85) - -**Project Summary** - -Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities. - -Your project should include: - -1. Model Deployment and Optimization - Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using: - - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance. - - The Vela compiler for optimization. - - These tools can be used to: - - Convert and visualize model graphs in TOSA format. - - Identify unsupported operators. - - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting. - - Run Vela for optimized compilation targeting Ethos-U85. - -2. Application Demonstration - Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include: - - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification). - - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks. 
- - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding. - -3. Analysis and Benchmarking - Report quantitative results on: - - Inference latency, throughput (FPS or tokens/s), and memory footprint. - - Power efficiency under load (optional). - - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits). - - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion. - ---- - -## What kind of projects should you target? - -To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria: - -- Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models. - - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms. -- High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput. - - *Example:* 512×512 semantic segmentation or multi-object detection. -- Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance. - - *Example:* large MLP heads or transformer token mixers. -- Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations. - -The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies. - ---- - -## What will you use? 
-You should be familiar with, or willing to learn about: -- Programming: Python, C/C++ -- ExecuTorch or TensorFlow Lite (Micro/LiteRT) -- Techniques for optimising AI models for the edge (quantization, pruning, etc.) -- Optimization Tools: - - TOSA Model Explorer - - .tflite to .tosa converter (if using TensorFlow rather than ExecuTorch) - - Vela compiler for Ethos-U -- Bare-metal or RTOS (e.g., Zephyr) - ---- - -## Resources from Arm and our partners -- Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) -- Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) -- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) -- Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) -- Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model) -- PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) ---- - -## Support Level - -This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution.
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md b/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md deleted file mode 100644 index 99c8e712..00000000 --- a/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md +++ /dev/null @@ -1,157 +0,0 @@ ---- -title: Game development using Arm Neural Graphics with Unreal Engine -description: Build a playable Unreal Engine 5 game demo that utilises Arm’s Neural Graphics SDK UE plugin for features such as Neural Super Sampling (NSS). Showcase near-identical image quality at lower resolution by driving neural rendering directly in the graphics pipeline. -subjects: -- ML -- Gaming -- Libraries -- Graphics -requires-team: -- No -platform: -- Mobile, Graphics, and Gaming -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Description - - ### Why is this important? - - Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation. - - Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face. - - [Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology) - - Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). 
It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process. - - Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released. - - ### Project Summary - - Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate: - - **Near-identical visuals at lower resolution** (render low → upscale with NSS) - - Document your progress and findings and consider alternative applications of the neural technology within games development. - - Attempt different environments and objects. For example: - - - Daytime vs night - - Urban city, jungle forest, ocean floor, alien planet, building interiors - - Complex lighting and shadows - - NPCs with detailed clothing, faces, hair. Include animations. - - Make your scenes dynamic with particle effects, shadows, physics and motion. - - --- - - ## Pre-requisites - - Laptop/PC/Mobile for Android Unreal Engine game development - - Willingness to learn about games development and graphics, and the increasing use of AI in these fields. 
- - --- - - ## Resources from Arm and partners - - Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling) - - Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works) - - Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics) - - Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/) - - Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/) - - Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/) - - Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine) - - Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines) - - Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym) - - Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/) - - --- - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. 
We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - -## Description - -### Why is this important? - -Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation. - -Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face. - -[Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology) - -Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process. - -Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released. - -### Project Summary - -Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate: -- **Near-identical visuals at lower resolution** (render low → upscale with NSS) - -Document your progress and findings and consider alternative applications of the neural technology within games development. - -Attempt different environments and objects. 
For example: - -- Daytime vs night -- Urban city, jungle forest, ocean floor, alien planet, building interiors -- Complex lighting and shadows -- NPCs with detailed clothing, faces, hair. Include animations. - -Make your scenes dynamic with particle effects, shadows, physics and motion. - ---- - -## Pre-requisites -- Laptop/PC/Mobile for Android Unreal Engine game development -- Willingness to learn about games development and graphics, and the increasing use of AI in these fields. - ---- - -## Resources from Arm and partners -- Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling) -- Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works) -- Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics) -- Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/) -- Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/) -- Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/) -- Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine) -- Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines) -- Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym) -- Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/) - ---- - -## Support Level - 
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md b/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md deleted file mode 100644 index 58bcca09..00000000 --- a/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md +++ /dev/null @@ -1,158 +0,0 @@ ---- -title: Game-Dev-Using-Neural-Graphics-&-Unreal-Engine -description: Build a playable Unreal Engine 5 game demo that utilises Arm’s Neural Graphics SDK UE plugin for features such as Neural Super Sampling (NSS). Showcase near-identical image quality at lower resolution by driving neural rendering directly in the graphics pipeline. 
-subjects: -- ML -- Gaming -- Libraries -- Graphics -requires-team: -- No -platform: -- Mobile, Graphics, and Gaming -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -badges: trending -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Description - - ### Why is this important? - - Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation. - - Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face. - - [Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology) - - Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process. - - Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released. - - ### Project Summary - - Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate: - - **Near-identical visuals at lower resolution** (render low → upscale with NSS) - - Document your progress and findings and consider alternative applications of the neural technology within games development. - - Attempt different environments and objects. 
For example: - - - Daytime vs night - - Urban city, jungle forest, ocean floor, alien planet, building interiors - - Complex lighting and shadows - - NPCs with detailed clothing, faces, hair. Include animations. - - Make your scenes dynamic with particle effects, shadows, physics and motion. - - --- - - ## Pre-requisites - - Laptop/PC/Mobile for Android Unreal Engine game development - - Willingness to learn about games development and graphics, and the increasing use of AI in these fields. - - --- - - ## Resources from Arm and partners - - Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling) - - Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works) - - Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics) - - Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/) - - Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/) - - Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/) - - Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine) - - Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines) - - Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym) - - Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/) - - --- - 
- ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - -## Description - -### Why is this important? - -Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation. - -Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face. - -[Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology) - -Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process. - -Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released. 
- -### Project Summary - -Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate: -- **Near-identical visuals at lower resolution** (render low → upscale with NSS) - -Document your progress and findings and consider alternative applications of the neural technology within games development. - -Attempt different environments and objects. For example: - -- Daytime vs night -- Urban city, jungle forest, ocean floor, alien planet, building interiors -- Complex lighting and shadows -- NPCs with detailed clothing, faces, hair. Include animations. - -Make your scenes dynamic with particle effects, shadows, physics and motion. - ---- - -## Pre-requisites -- Laptop/PC/Mobile for Android Unreal Engine game development -- Willingness to learn about games development and graphics, and the increasing use of AI in these fields. - ---- - -## Resources from Arm and partners -- Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling) -- Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works) -- Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics) -- Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/) -- Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/) -- Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/) -- Unreal Engine Intro by Epic Games: [Understanding the 
basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine) -- Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines) -- Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym) -- Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/) - ---- - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-always-on-ai-with-ethos-u85-npu.md b/docs/_posts/2025-11-27-always-on-ai-with-ethos-u85-npu.md deleted file mode 100644 index db0251b6..00000000 --- a/docs/_posts/2025-11-27-always-on-ai-with-ethos-u85-npu.md +++ /dev/null @@ -1,136 +0,0 @@ ---- -title: 'Edge AI with NPU: always-on-AI with ExecuTorch on Cortex-M55 + Ethos-U85 → Cortex-A' -description: The vision of Edge AI compute is to embed low-power intelligent sensing, perception, and decision systems everywhere. A low-power always-on-AI island continuously monitors sensory inputs to detect triggers. When a trigger is detected, it wakes up a more capable processor to carry out high-value inference, interaction, or control tasks. 
-subjects: -- ML -- Performance and Architecture -- Embedded Linux -- RTOS Fundamentals -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why is this important?** - - The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy: - - A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies. - - When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance. - - This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources. - - **Project Summary** - - Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements: - - 1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers. - 2. 
A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference. - 3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring). - - *Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. For LLM inference, consider connecting a Raspberry Pi 5 or similar.* - - Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices. - - You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved. - - Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve? - - ## What will you use? - You should either be familiar with, or willing to learn about, the following: - - Programming: Python, C++, Embedded C - - ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA. - - Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. 
Yocto) or Raspberry Pi OS - - - ## Resources from Arm and our partners - - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) - - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) - - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - - -## Description - -**Why is this important?** - -The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy: -- A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies. 
-- When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance. - -This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources. - -**Project Summary** - -Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements: - -1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers. -2. A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference. -3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring). - -*Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. For LLM inference, consider connecting a Raspberry Pi 5 or similar.* - -Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices. 
- -You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved. - -Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve? - -## What will you use? -You should either be familiar with, or willing to learn about, the following: -- Programming: Python, C++, Embedded C -- ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA. -- Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. Yocto) or Raspberry Pi OS - - -## Resources from Arm and our partners -- Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) -- Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) -- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) -- Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) -- PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-edge-ai-on-mobile.md b/docs/_posts/2025-11-27-edge-ai-on-mobile.md deleted file mode 100644 index f90f03fc..00000000 --- a/docs/_posts/2025-11-27-edge-ai-on-mobile.md +++ /dev/null @@ -1,130 +0,0 @@ ---- -title: 'SME2 on vivo X300: Mobile Edge AI Projects for multi-modal inference, built on Arm Lumex' -description: Leverage the latest SME2 (Scalable Matrix Extension 2) available on the newest vivo X300 smartphones (built on Arm Lumex CSS) for advanced image/video, audio and text processing edge AI. Explore how SME2, via KleidiAI, enables larger matrix workloads, higher throughput, and novel applications on device without cloud connectivity required. -subjects: -- ML -- Performance and Architecture -- Libraries -requires-team: -- No -platform: -- Mobile, Graphics, and Gaming -- AI -- IoT -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -badges: trending -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - ### Why is this important? - - SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device. - - [SME2](https://www.arm.com/technologies/sme2) - - The vivo X300 is built on Arm Lumex. 
SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases. - - [vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones) - - ### Project Summary - - Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2. - - Example project areas: - - Real-time video semantic segmentation (e.g., background removal + AR compositing) - - Live object detection + natural-language description (text summary of what the camera sees) - - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition - - On-device lightweight LLM or encoder-only transformer processing for mobile assistants - - Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case. - - Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2. 
- - --- - - ## Resources from Arm and our partners - - - Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile) - - Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/) - - Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/) - - Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/) - - Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm) - - --- - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
- - --- ---- -## Description - -### Why is this important? - -SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device. - -[SME2](https://www.arm.com/technologies/sme2) - -The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases. - -[vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones) - -### Project Summary - -Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2. - -Example project areas: - - Real-time video semantic segmentation (e.g., background removal + AR compositing) - - Live object detection + natural-language description (text summary of what the camera sees) - - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition - - On-device lightweight LLM or encoder-only transformer processing for mobile assistants - -Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. 
Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case. - -Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2. - ---- - -## Resources from Arm and our partners - -- Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile) -- Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/) -- Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/) -- Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/) -- Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/) -- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) -- Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm) - ---- - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. - ---- \ No newline at end of file diff --git a/docs/_posts/2025-11-27-ethos-u85-npu-applications.md b/docs/_posts/2025-11-27-ethos-u85-npu-applications.md deleted file mode 100644 index 2d3b1edb..00000000 --- a/docs/_posts/2025-11-27-ethos-u85-npu-applications.md +++ /dev/null @@ -1,213 +0,0 @@ ---- -title: 'Ethos-U85 NPU Applications with TOSA Model Explorer: Exploring Next-Gen Edge AI Inference' -description: Push the limits of Edge AI by deploying the heaviest inference applications possible on Ethos-U85. Students will explore transformer-based and TOSA-optimized workloads that demonstrate performance levels on the next-gen of Ethos NPUs. -subjects: -- ML -- Performance and Architecture -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -badges: trending -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Description - - **Why is this important?** - - The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices. - - This project challenges you to explore the boundaries of what’s possible on Ethos-U85. 
The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85. - - [Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85) - - **Project Summary** - - Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities. - - Your project should include: - - 1. Model Deployment and Optimization - Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using: - - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance. - - The Vela compiler for optimization. - - These tools can be used to: - - Convert and visualize model graphs in TOSA format. - - Identify unsupported operators. - - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting. - - Run Vela for optimized compilation targeting Ethos-U85. - - 2. Application Demonstration - Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include: - - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification). - - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks. - - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding. - - 3. Analysis and Benchmarking - Report quantitative results on: - - Inference latency, throughput (FPS or tokens/s), and memory footprint. - - Power efficiency under load (optional). 
- - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits). - - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion. - - --- - - ## What kind of projects should you target? - - To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria: - - - Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models. - - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms. - - High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput. - - *Example:* 512×512 semantic segmentation or multi-object detection. - - Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance. - - *Example:* large MLP heads or transformer token mixers. - - Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations. - - The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies. - - --- - - ## What will you use? - You should be familiar with, or willing to learn about: - - Programming: Python, C/C++ - - ExecuTorch or TensorFlow Lite (Micro/LiteRT) - - Techniques for optimising AI models for the edge (quantization, pruning, etc.) 
- - Optimization Tools: - - TOSA Model Explorer - - .tflite to .tosa converter (if using Tensorflow rather than ExecuTorch) - - Vela compiler for Ethos-U - - Bare-metal or RTOS (e.g., Zephyr) - - --- - - ## Resources from Arm and our partners - - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) - - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) - - Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model) - - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) - --- - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - -## Description - -**Why is this important?** - -The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. 
With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices. - -This project challenges you to explore the boundaries of what’s possible on Ethos-U85. The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85. - -[Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85) - -**Project Summary** - -Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities. - -Your project should include: - -1. Model Deployment and Optimization - Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using: - - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance. - - The Vela compiler for optimization. - - These tools can be used to: - - Convert and visualize model graphs in TOSA format. - - Identify unsupported operators. - - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting. - - Run Vela for optimized compilation targeting Ethos-U85. - -2. Application Demonstration - Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include: - - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification). - - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks. 
- - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding. - -3. Analysis and Benchmarking - Report quantitative results on: - - Inference latency, throughput (FPS or tokens/s), and memory footprint. - - Power efficiency under load (optional). - - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits). - - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion. - ---- - -## What kind of projects should you target? - -To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria: - -- Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models. - - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms. -- High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput. - - *Example:* 512×512 semantic segmentation or multi-object detection. -- Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance. - - *Example:* large MLP heads or transformer token mixers. -- Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations. - -The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies. - ---- - -## What will you use? 
-You should be familiar with, or willing to learn about: -- Programming: Python, C/C++ -- ExecuTorch or TensorFlow Lite (Micro/LiteRT) -- Techniques for optimising AI models for the edge (quantization, pruning, etc.) -- Optimization Tools: - - TOSA Model Explorer - - .tflite to .tosa converter (if using Tensorflow rather than ExecuTorch) - - Vela compiler for Ethos-U -- Bare-metal or RTOS (e.g., Zephyr) - ---- - -## Resources from Arm and our partners -- Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) -- Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) -- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) -- Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) -- Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model) -- PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) ---- - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-game-dev-using-neural-graphics---unreal-engine.md b/docs/_posts/2025-11-27-game-dev-using-neural-graphics---unreal-engine.md deleted file mode 100644 index 24988e84..00000000 --- a/docs/_posts/2025-11-27-game-dev-using-neural-graphics---unreal-engine.md +++ /dev/null @@ -1,158 +0,0 @@ ---- -title: Game development using Arm Neural Graphics with Unreal Engine -description: Build a playable Unreal Engine 5 game demo that utilises Arm’s Neural Graphics SDK UE plugin for features such as Neural Super Sampling (NSS). Showcase near-identical image quality at lower resolution by driving neural rendering directly in the graphics pipeline. -subjects: -- ML -- Gaming -- Libraries -- Graphics -requires-team: -- No -platform: -- Mobile, Graphics, and Gaming -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -badges: trending -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Description - - ### Why is this important? - - Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation. - - Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face. - - [Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology) - - Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). 
It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process. - - Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released. - - ### Project Summary - - Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate: - - **Near-identical visuals at lower resolution** (render low → upscale with NSS) - - Document your progress and findings and consider alternative applications of the neural technology within games development. - - Attempt different environments and objects. For example: - - - Daytime vs night - - Urban city, jungle forest, ocean floor, alien planet, building interiors - - Complex lighting and shadows - - NPCs with detailed clothing, faces, hair. Include animations. - - Make your scenes dynamic with particle effects, shadows, physics and motion. - - --- - - ## Pre-requisites - - Laptop/PC/Mobile for Android Unreal Engine game development - - Willingness to learn about games development and graphics, and the increasing use of AI in these fields. 
- - --- - - ## Resources from Arm and partners - - Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling) - - Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works) - - Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics) - - Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/) - - Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/) - - Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/) - - Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine) - - Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines) - - Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym) - - Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/) - - --- - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- - - -## Description - -### Why is this important? - -Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation. - -Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face. - -[Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology) - -Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process. - -Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released. - -### Project Summary - -Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate: -- **Near-identical visuals at lower resolution** (render low → upscale with NSS) - -Document your progress and findings and consider alternative applications of the neural technology within games development. - -Attempt different environments and objects. For example: - -- Daytime vs night -- Urban city, jungle forest, ocean floor, alien planet, building interiors -- Complex lighting and shadows -- NPCs with detailed clothing, faces, hair. Include animations. 
- -Make your scenes dynamic with particle effects, shadows, physics and motion. - ---- - -## Pre-requisites -- Laptop/PC/Mobile for Android Unreal Engine game development -- Willingness to learn about games development and graphics, and the increasing use of AI in these fields. - ---- - -## Resources from Arm and partners -- Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling) -- Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works) -- Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics) -- Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/) -- Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/) -- Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/) -- Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine) -- Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines) -- Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym) -- Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/) - ---- - -## Support Level - -This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - -## Benefits - -Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file From ee99d0856920e6d204f62bdc011e2fbfdfdebe3f Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 11:50:03 +0000 Subject: [PATCH 89/98] Add files via upload --- docs/_posts/2025-05-30-AI-Agents.md | 113 ++++++++++ .../2025-05-30-AI-Powered-Porting-Tool.md | 123 ++++++++++ .../2025-05-30-AMBA-Simulator-Framework.md | 99 ++++++++ .../2025-05-30-Academic-Trends-Dashboard.md | 98 ++++++++ ...25-05-30-Architecture-Insight-Dashboard.md | 116 ++++++++++ .../2025-05-30-Arduino-IDE-Windows-on-Arm.md | 125 ++++++++++ ...5-05-30-Bioinformatic-Pipeline-Analysis.md | 126 +++++++++++ ...30-Compliance-Ready-Smart-Camera-System.md | 97 ++++++++ .../2025-05-30-FPGA-Accellerator-with-DDR.md | 76 +++++++ docs/_posts/2025-05-30-HPC-Algorithm.md | 95 ++++++++ ...5-05-30-Haskell-Compiler-Windows-on-Arm.md | 125 ++++++++++ .../2025-05-30-Human-Centric-Robotics.md | 113 ++++++++++ .../2025-05-30-LLM-Benchmark-on-Arm-Server.md | 79 +++++++ ...-05-30-Machine-Learning-on-AWS-Graviton.md | 111 +++++++++ ...-05-30-Processor-in-the-Loop-Automotive.md | 121 ++++++++++ .../2025-05-30-Quantisation-Aware-Training.md | 106 +++++++++ .../2025-05-30-R-Arm-Community-Support.md | 140 ++++++++++++ ...25-05-30-Real-Time-Image-Classification.md | 98 ++++++++ ...05-30-Responsible-AI-and-Yellow-Teaming.md | 144 ++++++++++++ 
...2025-05-30-Sentiment-Analysis-Dashboard.md | 85 +++++++ .../2025-05-30-Smart-Voice-Assistant.md | 93 ++++++++ ...05-30-SpecINT2017-benchmarking-on-Arm64.md | 126 +++++++++++ .../2025-05-30-Write-A-Learning-Path.md | 80 +++++++ docs/_posts/2025-05-30-projects.md | 13 ++ ...-07-11-C-Based-Application-from-Scratch.md | 103 +++++++++ docs/_posts/2025-08-28-NPC-LLM-Runtime.md | 122 ++++++++++ .../2025-11-03-Python-Porting-Challenge.md | 118 ++++++++++ ...5-11-27-Always-On-AI-with-Ethos-U85-NPU.md | 136 +++++++++++ docs/_posts/2025-11-27-Edge-AI-On-Mobile.md | 130 +++++++++++ .../2025-11-27-Ethos-U85-NPU-Applications.md | 213 ++++++++++++++++++ ...Dev-Using-Neural-Graphics-Unreal-Engine.md | 158 +++++++++++++ 31 files changed, 3482 insertions(+) create mode 100644 docs/_posts/2025-05-30-AI-Agents.md create mode 100644 docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md create mode 100644 docs/_posts/2025-05-30-AMBA-Simulator-Framework.md create mode 100644 docs/_posts/2025-05-30-Academic-Trends-Dashboard.md create mode 100644 docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md create mode 100644 docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md create mode 100644 docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md create mode 100644 docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md create mode 100644 docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md create mode 100644 docs/_posts/2025-05-30-HPC-Algorithm.md create mode 100644 docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md create mode 100644 docs/_posts/2025-05-30-Human-Centric-Robotics.md create mode 100644 docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md create mode 100644 docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md create mode 100644 docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md create mode 100644 docs/_posts/2025-05-30-Quantisation-Aware-Training.md create mode 100644 docs/_posts/2025-05-30-R-Arm-Community-Support.md create mode 100644 
docs/_posts/2025-05-30-Real-Time-Image-Classification.md create mode 100644 docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md create mode 100644 docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md create mode 100644 docs/_posts/2025-05-30-Smart-Voice-Assistant.md create mode 100644 docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md create mode 100644 docs/_posts/2025-05-30-Write-A-Learning-Path.md create mode 100644 docs/_posts/2025-05-30-projects.md create mode 100644 docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md create mode 100644 docs/_posts/2025-08-28-NPC-LLM-Runtime.md create mode 100644 docs/_posts/2025-11-03-Python-Porting-Challenge.md create mode 100644 docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md create mode 100644 docs/_posts/2025-11-27-Edge-AI-On-Mobile.md create mode 100644 docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md create mode 100644 docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md diff --git a/docs/_posts/2025-05-30-AI-Agents.md b/docs/_posts/2025-05-30-AI-Agents.md new file mode 100644 index 00000000..72172e58 --- /dev/null +++ b/docs/_posts/2025-05-30-AI-Agents.md @@ -0,0 +1,113 @@ +--- +title: AI-Agents +description: This self-service project builds a sandboxed AI agent on Arm hardware that harnesses appropriately sized LLMs to safely automate complex workflows—from DevOps pipelines to e-commerce tasks—demonstrating secure, efficient automation on accessible Arm platforms. 
+subjects: +- ML +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +badges: trending +layout: article +sidebar: + nav: projects +full_description: |- + ### Description + + **Why this is important?** + + AI Agents enhance large language models (LLMs) by performing user-driven actions, enabling various commercial applications. This is a nascent domain, with emerging frameworks such as the model context protocol (MCP) leading to commercial products and services. The Arm architecture, from microcontrollers to servers, will be used to carry out agentic functions and Arm has many initiatives to support the AI future. See [our website for more details](https://www.arm.com/markets/artificial-intelligence). + + **Project Summary** + + Participants must develop an AI-powered agent that automates repetitive and complex workflow tasks in a specific domain, such as software development, e-commerce, or DevOps. The foundational model can be a suitable model of your choice (e.g., [OpenAI API](https://openai.com/api/)) but you must consider the appropriate model for cost, reliability and accessibility. Additionally, you are free to choose the tools for agent functionality, such as [LLama-cpp-agent](https://github.com/Maximilian-Winter/llama-cpp-agent). One stipulation is that the LLM and/or agent must run on an Arm-based system, such as a Google Pixel phone or Arm-based server. + + The AI agent will be deployed in a sandboxed environment to ensure safety and prevent unintended consequences, including prompt guardrails. + + ## Prerequisites + + - Intermediate understanding in an OOP language such as Python (for front-end, if needed). + - Familiarity using Databases such as PostgreSQL, MongoDB, VectorDB.
+ - Access to a LLM (e.g., through an API or on-device LLM) + - Optional API access to target workflow tools such as Jira, Jenkins etc. + + + ## Resources from Arm and our partners + + - Learning path: [Deploy an MCP Server on a Raspberry Pi5 for AI Agent Interaction](https://learn.arm.com/learning-paths/cross-platform/mcp-ai-agent/) + + - Learning path: [Deploy an AI Agent on Arm with llama.cpp and llama-cpp-agent](https://learn.arm.com/learning-paths/servers-and-cloud-computing/ai-agent-on-cpu/) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + + ### Previous Submissions + 1. [AI to Solve Maths Example Sheets at University of Cambridge. (Finley Stirk, Eliyahu Gluschove-Koppel and Ronak De)](https://github.com/egkoppel/example-papers) + + 2. [AI that interprets user requests, generates circuit descriptions, creates LTSpice ASC code, and iteratively refines circuit designs using a combination of GPT-based language models, a vision analysis module, and LTSpice simulation. (Gijeong Lee, Bill Leoutsakos)](https://github.com/BillLeoutsakosvl346/ElectroNinjaRefined) + + + 3.
[AI agent to track real-time student engagement and exam performance (Jasper Wang, Sritej Tummuru, Talha Javed)](https://github.com/JasperWANG-911/AI_Agent) +--- +### Description + +**Why this is important?** + +AI Agents enhance large language models (LLMs) by performing user-driven actions, enabling various commercial applications. This is a nascent domain, with emerging frameworks such as the model context protocol (MCP) leading to commercial products and services. The Arm architecture, from microcontrollers to servers, will be used to carry out agentic functions and Arm has many initiatives to support the AI future. See [our website for more details](https://www.arm.com/markets/artificial-intelligence). + +**Project Summary** + +Participants must develop an AI-powered agent that automates repetitive and complex workflow tasks in a specific domain, such as software development, e-commerce, or DevOps. The foundational model can be a suitable model of your choice (e.g., [OpenAI API](https://openai.com/api/)) but you must consider the appropriate model for cost, reliability and accessibility. Additionally, you are free to choose the tools for agent functionality, such as [LLama-cpp-agent](https://github.com/Maximilian-Winter/llama-cpp-agent). One stipulation is that the LLM and/or agent must run on an Arm-based system, such as a Google Pixel phone or Arm-based server. + +The AI agent will be deployed in a sandboxed environment to ensure safety and prevent unintended consequences, including prompt guardrails. + +## Prerequisites + +- Intermediate understanding in an OOP language such as Python (for front-end, if needed). +- Familiarity using Databases such as PostgreSQL, MongoDB, VectorDB. +- Access to a LLM (e.g., through an API or on-device LLM) +- Optional API access to target workflow tools such as Jira, Jenkins etc.
+ + +## Resources from Arm and our partners + +- Learning path: [Deploy an MCP Server on a Raspberry Pi5 for AI Agent Interaction](https://learn.arm.com/learning-paths/cross-platform/mcp-ai-agent/) + +- Learning path: [Deploy an AI Agent on Arm with llama.cpp and llama-cpp-agent](https://learn.arm.com/learning-paths/servers-and-cloud-computing/ai-agent-on-cpu/) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + +### Previous Submissions +1. [AI to Solve Maths Example Sheets at University of Cambridge. (Finley Stirk, Eliyahu Gluschove-Koppel and Ronak De)](https://github.com/egkoppel/example-papers) + +2. [AI that interprets user requests, generates circuit descriptions, creates LTSpice ASC code, and iteratively refines circuit designs using a combination of GPT-based language models, a vision analysis module, and LTSpice simulation. (Gijeong Lee, Bill Leoutsakos)](https://github.com/BillLeoutsakosvl346/ElectroNinjaRefined) + + +3.
[AI agent to track real-time student engagement and exam performance (Jasper Wang, Sritej Tummuru, Talha Javed)](https://github.com/JasperWANG-911/AI_Agent) \ No newline at end of file diff --git a/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md b/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md new file mode 100644 index 00000000..82c890c4 --- /dev/null +++ b/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md @@ -0,0 +1,123 @@ +--- +title: AI-Powered-Porting-Tool +description: This self-service project creates an AI-driven porting engine that analyzes package dependencies, auto-generates fixes, and submits pull requests—accelerating native macOS and Windows-on-Arm support for bioinformatics and R software so researchers can run demanding workflows directly on modern Arm devices. +subjects: +- CI-CD +- ML +- Migration to Arm +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- Mobile, Graphics, and Gaming +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + **Why this is important?** + + Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). 
While many leading Bioconda packages now support Linux/Arm, there remains a gap in native macOS and Windows on Arm support, as numerous packages default to emulated x86 environments. Additionally, the R community faces challenges with Windows-on-Arm support for community-created packages, with many unable to build due to x86-specific code issues. + + **Project Summary** + + This project challenges you to build an intelligent automation tool for porting software packages — for use in domains such as [bioinformatic pipelines with Nextflow](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/Bioinformatic-Pipeline-Analysis.md) or [statistics with R](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/R-Arm-Community-Support.md). + + Given the large number of community packages, applying manual patches is not only time-consuming but also inefficient, as many involve similar, repetitive adjustments—highlighting the need for a scalable, automated solution. + The goal is to build a sophisticated system (beyond simple shell scripts) that uses dependency graph analysis, machine learning, to: + + - Identify unported packages + - Trace recursive dependency issues + - Recommend or auto-generate build recipes and steps + - Evaluate build success and reattempt intelligently + - Generate pull requests when confident of a fix. + - For complex packages, offer guidance to developers on how to port them—for example, by suggesting tools like SSE2NEON for translating x86 SSE intrinsics. + - Be extensible to work with various packaging systems and languages + + This project is a blend of automation, machine learning, and systems programming. The outcome could directly contribute to open source ecosystems and help bring cutting-edge bioinformatics tools to wider hardware audiences. + + ## Prerequisites + + - Access to Apple Silicon or Windows on Arm machine. 
+ - Familiarity with Python, Bash and Nextflow + - Familiar with genomics/bioinformatics or statistics with the R language. + - Experience or willing to learn nf-core pipelines, Conda, BioConda and Docker/Singularity. + + + ## Resources from Arm and our partners + + - External Resource: [Example Porting Script for Bioconda](https://github.com/dslarm/bioconda-contrib-notes/tree/main), [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) and [Bioconda package repository](https://bioconda.github.io/) + - Documentation: [nf-core documentation](https://nf-co.re/docs/) + - External Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/), Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages + - Dataset: Example [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- +## Description + +**Why this is important?** + +Bioconda is a specialized package repository for bioinformatics and genomics. 
Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). While many leading Bioconda packages now support Linux/Arm, there remains a gap in native macOS and Windows on Arm support, as numerous packages default to emulated x86 environments. Additionally, the R community faces challenges with Windows-on-Arm support for community-created packages, with many unable to build due to x86-specific code issues. + +**Project Summary** + +This project challenges you to build an intelligent automation tool for porting software packages — for use in domains such as [bioinformatic pipelines with Nextflow](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/Bioinformatic-Pipeline-Analysis.md) or [statistics with R](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/R-Arm-Community-Support.md). + +Given the large number of community packages, applying manual patches is not only time-consuming but also inefficient, as many involve similar, repetitive adjustments—highlighting the need for a scalable, automated solution. +The goal is to build a sophisticated system (beyond simple shell scripts) that uses dependency graph analysis, machine learning, to: + +- Identify unported packages +- Trace recursive dependency issues +- Recommend or auto-generate build recipes and steps +- Evaluate build success and reattempt intelligently +- Generate pull requests when confident of a fix. 
+- For complex packages, offer guidance to developers on how to port them—for example, by suggesting tools like SSE2NEON for translating x86 SSE intrinsics. +- Be extensible to work with various packaging systems and languages + +This project is a blend of automation, machine learning, and systems programming. The outcome could directly contribute to open source ecosystems and help bring cutting-edge bioinformatics tools to wider hardware audiences. + +## Prerequisites + +- Access to Apple Silicon or Windows on Arm machine. +- Familiarity with Python, Bash and Nextflow +- Familiar with genomics/bioinformatics or statistics with the R language. +- Experience or willing to learn nf-core pipelines, Conda, BioConda and Docker/Singularity. + + +## Resources from Arm and our partners + +- External Resource: [Example Porting Script for Bioconda](https://github.com/dslarm/bioconda-contrib-notes/tree/main), [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) and [Bioconda package repository](https://bioconda.github.io/) +- Documentation: [nf-core documentation](https://nf-co.re/docs/) +- External Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/), Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages +- Dataset: Example [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
+ +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md b/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md new file mode 100644 index 00000000..3b66cc46 --- /dev/null +++ b/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md @@ -0,0 +1,99 @@ +--- +title: AMBA-Simulator-Framework +description: This self-guided hardware project has you implement, simulate, and FPGA-prototype a Verilog AMBA bus—from simple APB to advanced CHI—sharpening hands-on expertise with Arm’s interconnect backbone and yielding a reusable reference design for future embedded systems. +subjects: +- Virtual Hardware +- Performance and Architecture +requires-team: +- No +platform: +- Embedded and Microcontrollers +sw-hw: +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + ## Audience + Electronic Engineering + + ## Description + This project aims to develop a reference design of AMBA (Advanced Microcontroller Bus Architecture) infrastructure. You are free to choose which AMBA protocol and version of the interconnect standard to implement with more simple specifications e.g., APB, being easier to create than coherent protocols such as AMBA CHI. + + The main deliverables include the Verilog design of the AMBA infrastructure, a Verilog test bench for testing the design, an RTL (Register Transfer Level) simulation flow to verify the functionality, and an FPGA prototyping platform to demonstrate the design in a real-world environment. 
The project will provide a comprehensive understanding of AMBA protocols and their implementation, making it an excellent learning opportunity for students interested in digital design and hardware description languages. + + ## Prerequisites + + - Intermediate understanding of Verilog, SystemVerilog or other hardware description languages (HDL). + - Access and basic understanding of ModelSim, Quartus and Vivado + - Access to a suitable FPGA development board (e.g., Xilinx or Altera), simulation tools + + ## Resources from Arm and our partners + + + - Video: [Introductory Video to AMBA](https://www.youtube.com/watch?v=zayyWwSxyW4) + - Documentation: [AMBA Interconnect Specifications](https://www.arm.com/architecture/system-architectures/amba/amba-specifications) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Previous Submissions + + Similar projects: + - https://github.com/kumarraj5364/AMBA-APB-PROTOCOL + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + +## Audience +Electronic Engineering + +## Description +This project aims to develop a reference design of AMBA (Advanced Microcontroller Bus Architecture) infrastructure.
You are free to choose which AMBA protocol and version of the interconnect standard to implement with more simple specifications e.g., APB, being easier to create than coherent protocols such as AMBA CHI. + +The main deliverables include the Verilog design of the AMBA infrastructure, a Verilog test bench for testing the design, an RTL (Register Transfer Level) simulation flow to verify the functionality, and an FPGA prototyping platform to demonstrate the design in a real-world environment. The project will provide a comprehensive understanding of AMBA protocols and their implementation, making it an excellent learning opportunity for students interested in digital design and hardware description languages. + +## Prequisites + +- Intermediate understanding of Verilog, SystemVerilog or other hardware description languages (HDL). +- Access and basic understanding of ModelSim, Quartus and Vivado +- Access to a suitable FPGA development board (e.g., Xilinx or Altera), simulation tools + +## Resources from Arm and our partners + + +- Video: [Introductory Video to AMBA](https://www.youtube.com/watch?v=zayyWwSxyW4) +- Documentation: [AMBA Interconnect Specifications](https://www.arm.com/architecture/system-architectures/amba/amba-specifications) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Previous Submissions + +Similar projects: + - https://github.com/kumarraj5364/AMBA-APB-PROTOCOL + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md b/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md new file mode 100644 index 00000000..b79f62bc --- /dev/null +++ b/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md @@ -0,0 +1,98 @@ +--- +title: Academic-Trends-Dashboard +description: This self-service project creates a web-scraping, database-driven dashboard that visualizes how computer-science research topics shift over time—helping Arm partners and chip architects align future hardware designs with emerging algorithmic trends. +subjects: +- Web +- Databases +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- Mobile, Graphics, and Gaming +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + ## Description + + **Why this is important?** + + The field of computer science research is continually evolving, with new algorithms shaping the design of future hardware. This project aims to guide computer architecture decisions, ensuring that upcoming hardware aligns with the needs of software algorithms and applications. The dashboard you create can be of use to Arm partners manufacturing physical chips and used to guide architecture decisions. + + **Project Summary** + + The main deliverable is a web scraping tool that pulls keywords from academic papers, considering the popularity of the paper and its publication. The data will be stored in an appropriate database and displayed in a web browser format, allowing users to visualize trends and changes in research focus over time. 
This project will provide practical experience in using APIs, web scraping, and data analysis. Some academic search engines to consider are Google Scholar, [BASE](https://www.base-search.net/), [Core](https://core.ac.uk/) and [Science.gov](https://science.gov/). + + + ## Prequisites + + - Software: Intermediate understand of a scripting programming language (e.g., Python, JavaScript), web development and statistics. + - Hardware: Access to a computer with internet connectivity + - API access to scrape specific journal websites, you may need to obtain explicit permission from the website administrators or owners. + + ## Resources from Arm and our partners + + - Learning path: [Deploy MariaDB on Arm servers](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/)) + - Learning path: [Learn how to deploy PostgresSQL](https://learn.arm.com/learning-paths/servers-and-cloud-computing/postgresql/) + - Software Libraries: Example libraries for web scraping are [BeautifulSoup](https://pypi.org/project/beautifulsoup4/), Selenium. + + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+--- + + +## Description + +**Why this is important?** + +The field of computer science research is continually evolving, with new algorithms shaping the design of future hardware. This project aims to guide computer architecture decisions, ensuring that upcoming hardware aligns with the needs of software algorithms and applications. The dashboard you create can be of use to Arm partners manufacturing physical chips and used to guide architecture decisions. + +**Project Summary** + +The main deliverable is a web scraping tool that pulls keywords from academic papers, considering the popularity of the paper and its publication. The data will be stored in an appropriate database and displayed in a web browser format, allowing users to visualize trends and changes in research focus over time. This project will provide practical experience in using APIs, web scraping, and data analysis. Some academic search engines to consider are Google Scholar, [BASE](https://www.base-search.net/), [Core](https://core.ac.uk/) and [Science.gov](https://science.gov/). + + +## Prequisites + +- Software: Intermediate understand of a scripting programming language (e.g., Python, JavaScript), web development and statistics. +- Hardware: Access to a computer with internet connectivity +- API access to scrape specific journal websites, you may need to obtain explicit permission from the website administrators or owners. + +## Resources from Arm and our partners + +- Learning path: [Deploy MariaDB on Arm servers](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/)) +- Learning path: [Learn how to deploy PostgresSQL](https://learn.arm.com/learning-paths/servers-and-cloud-computing/postgresql/) +- Software Libraries: Example libraries for web scraping are [BeautifulSoup](https://pypi.org/project/beautifulsoup4/), Selenium. 
+ + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md b/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md new file mode 100644 index 00000000..16b56442 --- /dev/null +++ b/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md @@ -0,0 +1,116 @@ +--- +title: Architecture-Insight-Dashboard +description: This self-service project develops a data-rich dashboard that visualizes the popularity of Arm CPU/OS combinations and pinpoints software-stack support for specific extensions—giving developers an instant, validated view of where their workloads will run best. +subjects: +- Performance and Architecture +- Web +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- Mobile, Graphics, and Gaming +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ### Description + + **Why this is important?** + + Developers often face challenges in selecting the appropriate platform for their software. 
With numerous smartphones and cloud instances available, gauging consumer popularity and availability can be difficult, and identifying software stack dependencies can be time-consuming. As Arm anticipates an increase in Arm-based products in the coming years, this situation is likely to become even more complex, requiring the need for a single, validated solution. + + **Project Summary** + + This project aims to develop a comprehensive dashboard that lets a developer know what proportion of devices support a specific Arm CPU extension, similar to [“Can I use”](https://caniuse.com/) for web development and any software compatibility issues. The functional requirements for the Architecture Insights dashboard: + + - Popularity of Arm architectures and Operating System combinations over time + - Searchable index of software, libraries and tools that have been optimised for a specific architecture. For example, "Does the video processing software, FFMPEG, support acceleration for SVE2 with Windows 11?" + + + Students will gain hands-on experience with data visualization, statistical analysis, web development, and market analysis, providing valuable insights into the Arm ecosystem. + + ## Prequisites + + You are free to explore your own implementation. The skills below are examples. + + - Intemediate understanding of an OOP language such as Python or JavaScript + - Access to a computer with internet connectivity + + + ## Resources from Arm and our partners + + - Website: [Arm Software Ecosystem Dashboard](https://www.arm.com/developer-hub/ecosystem-dashboard) + - Website: [Windows on Arm Support Wiki page](https://linaro.atlassian.net/wiki/spaces/WOAR/overview) + - Website: ["Can I Use?" dashboard](https://caniuse.com/) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + + +### Description + +**Why this is important?** + +Developers often face challenges in selecting the appropriate platform for their software. With numerous smartphones and cloud instances available, gauging consumer popularity and availability can be difficult, and identifying software stack dependencies can be time-consuming. As Arm anticipates an increase in Arm-based products in the coming years, this situation is likely to become even more complex, requiring the need for a single, validated solution. + +**Project Summary** + +This project aims to develop a comprehensive dashboard that lets a developer know what proportion of devices support a specific Arm CPU extension, similar to [“Can I use”](https://caniuse.com/) for web development and any software compatibility issues. The functional requirements for the Architecture Insights dashboard: + +- Popularity of Arm architectures and Operating System combinations over time +- Searchable index of software, libraries and tools that have been optimised for a specific architecture. For example, "Does the video processing software, FFMPEG, support acceleration for SVE2 with Windows 11?" + + +Students will gain hands-on experience with data visualization, statistical analysis, web development, and market analysis, providing valuable insights into the Arm ecosystem. 
+ +## Prerequisites + +You are free to explore your own implementation. The skills below are examples. + +- Intermediate understanding of an OOP language such as Python or JavaScript +- Access to a computer with internet connectivity + + +## Resources from Arm and our partners + +- Website: [Arm Software Ecosystem Dashboard](https://www.arm.com/developer-hub/ecosystem-dashboard) +- Website: [Windows on Arm Support Wiki page](https://linaro.atlassian.net/wiki/spaces/WOAR/overview) +- Website: ["Can I Use?" dashboard](https://caniuse.com/) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md b/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md new file mode 100644 index 00000000..a1e2146a --- /dev/null +++ b/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md @@ -0,0 +1,125 @@ +--- +title: Arduino-IDE-Windows-on-Arm +description: This self-service project ports and optimizes the Arduino IDE—patching its lzma-native dependency—to run natively and efficiently on Windows on Arm, giving developers hands-on experience with cross-platform builds, Arm64 performance tuning, and upstream open-source contributions. 
+subjects: +- Performance and Architecture +- Migration to Arm +- Libraries +requires-team: +- No +platform: +- Laptops and Desktops +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +badges: +- trending +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + + **Why this is important?** + + Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, consumers will expect both performance and battery efficiency across all WoA applications, such as the ArduinoIDE. + + **Project summary** + + This project focuses on **porting and optimising the Arduino IDE**—an essential open-source platform for embedded development to run natively and efficiently on Windows on Arm platforms. In addition, the project tackles a key dependency, `lzma-native`, a compression library used by the IDE, which currently [**lacks support for Windows on Arm**](https://github.com/addaleax/lzma-native/issues/132) Previous attempts to build `lzma-native` on WoA failed due to architecture-specific compilation issues and native module bindings (`node-gyp`, `liblzma`, etc.). + + ### Key Objectives: + - Successfully build and run the [Arduino IDE](https://github.com/arduino/arduino-ide) on Windows on Arm. + - Patch or fork [`lzma-native`](https://github.com/addaleax/lzma-native) to enable full compatibility on WoA. + - Benchmark IDE performance and memory usage on Arm64 vs. x64 emulation. + - Submit upstream patches and document issues to support long-term ecosystem health. + + This project aligns strongly with Arm’s mission to expand native software compatibility on Arm-based Windows devices. 
It provides students with a **deep dive into cross-platform development, native module compilation, and Arm architecture optimization**, making it ideal for CV building, community contribution, and real-world system-level experience. + + ## Prequisites + + + - Familiarity with JavaScript (Node.js), TypeScript and C++ (lzma-native) + - Familiarity or willing to learn `CMake`, `Ninja`, `Visual Studio with C++ Desktop Dev`, UTM + - Basic understandig of terminal programs such as `Windows Terminal`, `PowerShell` and `WSL2` + - Access to a physical Windows on Arm device or a [WoA Virtual Machine running through UTM](https://mac.getutm.app/gallery/windows-11-arm). see the [Linaro Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) for more information. + + + ## Resources from Arm and our partners + + - Repository: [Arduino IDE GitHub repo](https://github.com/arduino/arduino-ide) + - Repository: [lzma-native GitHub repo](https://github.com/addaleax/lzma-native) + - External Documentation: [Issue #132 – lzma-native Windows Arm64 build failure](https://github.com/addaleax/lzma-native/issues/132) + - Documentation: Arm’s official [Learn on Arm](https://learn.arm.com/) platform + - External Documentation: [Windows on Arm Environments – Linaro wiki](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) + - Documentation: [Node.js native addon guides](https://nodejs.org/api/addons.html) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
+ + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + + +## Description + +**Why this is important?** + +Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, consumers will expect both performance and battery efficiency across all WoA applications, such as the ArduinoIDE. + +**Project summary** + +This project focuses on **porting and optimising the Arduino IDE**—an essential open-source platform for embedded development to run natively and efficiently on Windows on Arm platforms. In addition, the project tackles a key dependency, `lzma-native`, a compression library used by the IDE, which currently [**lacks support for Windows on Arm**](https://github.com/addaleax/lzma-native/issues/132) Previous attempts to build `lzma-native` on WoA failed due to architecture-specific compilation issues and native module bindings (`node-gyp`, `liblzma`, etc.). + +### Key Objectives: +- Successfully build and run the [Arduino IDE](https://github.com/arduino/arduino-ide) on Windows on Arm. +- Patch or fork [`lzma-native`](https://github.com/addaleax/lzma-native) to enable full compatibility on WoA. +- Benchmark IDE performance and memory usage on Arm64 vs. x64 emulation. +- Submit upstream patches and document issues to support long-term ecosystem health. + +This project aligns strongly with Arm’s mission to expand native software compatibility on Arm-based Windows devices. 
It provides students with a **deep dive into cross-platform development, native module compilation, and Arm architecture optimization**, making it ideal for CV building, community contribution, and real-world system-level experience. + +## Prerequisites + + +- Familiarity with JavaScript (Node.js), TypeScript and C++ (lzma-native) +- Familiarity or willing to learn `CMake`, `Ninja`, `Visual Studio with C++ Desktop Dev`, UTM +- Basic understanding of terminal programs such as `Windows Terminal`, `PowerShell` and `WSL2` +- Access to a physical Windows on Arm device or a [WoA Virtual Machine running through UTM](https://mac.getutm.app/gallery/windows-11-arm). See the [Linaro Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) for more information. + + +## Resources from Arm and our partners + +- Repository: [Arduino IDE GitHub repo](https://github.com/arduino/arduino-ide) +- Repository: [lzma-native GitHub repo](https://github.com/addaleax/lzma-native) +- External Documentation: [Issue #132 – lzma-native Windows Arm64 build failure](https://github.com/addaleax/lzma-native/issues/132) +- Documentation: Arm’s official [Learn on Arm](https://learn.arm.com/) platform +- External Documentation: [Windows on Arm Environments – Linaro wiki](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) +- Documentation: [Node.js native addon guides](https://nodejs.org/api/addons.html) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
+ +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md b/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md new file mode 100644 index 00000000..66ef2921 --- /dev/null +++ b/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md @@ -0,0 +1,126 @@ +--- +title: Bioinformatic-Pipeline-Analysis +description: This self-service project benchmarks Arm64 Bioconda packages in real nf-core workflows—measuring performance, diagnosing build failures, and proposing fixes that accelerate truly native bioinformatics on the expanding fleet of Arm-powered machines. +subjects: +- Performance and Architecture +- Databases +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ### Description + + **Why this is important?** + + Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops running Linux and MacOS. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). 
While many leading Bioconda packages now support Linux/Arm, there still emulated components that can be the bottleneck. + + **Project summary** + + This project aims to benchmark specific Bioconda packages that have been built for Arm64 using the nf-core-arm-discovery repository. The participant will utilize public genomic datasets from databases such as NCBI, select appropriate datasets, and execute bioinformatics workflows on Arm-based infrastructure. The candidate will evaluate the performance, compatibility, and efficiency of these packages, document errors and failures, and investigate the reasons behind package build failures. The final deliverable will be a detailed report with performance metrics, identified issues, and recommended improvements to enhance package support on Arm64. + + The deliverables of the project are as follows: + + - Selection and justification of public genomic datasets. + - Execution of bioinformatics workflows using Bioconda packages on Arm64. + - Performance benchmarking and comparison with x86 architectures. + - Documentation of failed package builds and proposed fixes. + - Comprehensive report with results, analysis, and recommendations. + + + ## Prequisites + + - Intermediate understanding of Python, Bash and nextflow + - Basic experience with nf-core pipelines, Conda, Docker/Singularity, Snakemake + - Access to Arm64-based cloud instances (e.g., AWS Graviton) with plenty of memory and storage + - IP access to Public genomic databases (NCBI, ENA, etc.) 
+ + ## Resources from Arm and our partners + + - External Documentation: [nf-core documentation](https://nf-co.re/docs/) + + - External Documentation: [AWS Graviton documentation](https://aws.amazon.com/ec2/graviton/) + + - Repository: [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) + + - Repository: [Bioconda package repository](https://bioconda.github.io/) + + - Dataset: [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) + + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- +### Description + +**Why this is important?** + +Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops running Linux and MacOS. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). 
While many leading Bioconda packages now support Linux/Arm, there are still emulated components that can be the bottleneck. + +**Project summary** + +This project aims to benchmark specific Bioconda packages that have been built for Arm64 using the nf-core-arm-discovery repository. The participant will utilize public genomic datasets from databases such as NCBI, select appropriate datasets, and execute bioinformatics workflows on Arm-based infrastructure. The candidate will evaluate the performance, compatibility, and efficiency of these packages, document errors and failures, and investigate the reasons behind package build failures. The final deliverable will be a detailed report with performance metrics, identified issues, and recommended improvements to enhance package support on Arm64. + +The deliverables of the project are as follows: + +- Selection and justification of public genomic datasets. +- Execution of bioinformatics workflows using Bioconda packages on Arm64. +- Performance benchmarking and comparison with x86 architectures. +- Documentation of failed package builds and proposed fixes. +- Comprehensive report with results, analysis, and recommendations. + + +## Prerequisites + +- Intermediate understanding of Python, Bash and nextflow +- Basic experience with nf-core pipelines, Conda, Docker/Singularity, Snakemake +- Access to Arm64-based cloud instances (e.g., AWS Graviton) with plenty of memory and storage +- IP access to Public genomic databases (NCBI, ENA, etc.) 
+ +## Resources from Arm and our partners + +- External Documentation: [nf-core documentation](https://nf-co.re/docs/) + +- External Documentation: [AWS Graviton documentation](https://aws.amazon.com/ec2/graviton/) + +- Repository: [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) + +- Repository: [Bioconda package repository](https://bioconda.github.io/) + +- Dataset: [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) + + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md b/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md new file mode 100644 index 00000000..316c87a8 --- /dev/null +++ b/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md @@ -0,0 +1,97 @@ +--- +title: Compliance-Ready-Smart-Camera-System +description: This challenge will create and validate an Arm-based, smart camera pipeline on virtual automotive hardware—advancing safer, more developer-friendly driver-monitoring solutions for next-generation vehicles. 
+subjects: +- Security +- Embedded Linux +- ML +- Virtual Hardware +requires-team: +- Yes +platform: +- Mobile, Graphics, and Gaming +- Automotive +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +- Direct Support from Arm +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. + + + ## Description + + **Why this is important?** + + As of June 2025, [94% of global automakers](https://newsroom.arm.com/blog/arm-zena-css-ai-defined-vehicle-compute-platform) use Arm vehicle technology. The development of compliance-ready smart camera systems is crucial for Arm as it aligns with the increasing demand for safety and reliability in automotive platforms, see the [Arm Zena compute subsystem(CSS)](https://www.arm.com/products/automotive/compute-subsystems/zena) for more details. + + **Project summary** + + Government and external regulations, particularly in automotive industries, are shaping how smart camera products are designed, implemented, and verified. This project will explore how compliance with standards like ISO 26262 influences the design of smart camera systems, such as Driver Monitoring Systems (DMS) to ensure driver attentiveness. Students or engineers will collaborate with industry-relevant platforms, such as Arm's Kronos Fixed Virtual Platform (FVP) for automotive development, to understand and propose frictionless developer experiences aligned with functional safety standards. 
+ + Deliverables include: + - Developing a full end-to-end smart camera system that can be incorporated into a functional safety environment (such as automotive or medical), following the relevant standards for development (e.g., ISO 26262) + - A survey of regulatory requirements and their impact on smart camera design + - An architectural analysis integrating Arm-based systems into a compliant automotive software stack + - Recommendations for enhancing developer tools and reference software stacks to align with ISO standards + + ## Estimated Project Duration + - Estimated Time: 6+ months + - Participants: Team of 2+ + + ## Resources from Arm and Arm partners + - Learning Paths - [Automotive Development](https://www.arm.com/resources/learning-paths/automotive) + - Software - Arm Automotive Reference Platforms (e.g., [Kronos FVP](https://arm-auto-solutions.docs.arm.com/en/v1.0/overview.html)) + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- +Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. + + +## Description + +**Why this is important?** + +As of June 2025, [94% of global automakers](https://newsroom.arm.com/blog/arm-zena-css-ai-defined-vehicle-compute-platform) use Arm vehicle technology. 
The development of compliance-ready smart camera systems is crucial for Arm as it aligns with the increasing demand for safety and reliability in automotive platforms, see the [Arm Zena compute subsystem(CSS)](https://www.arm.com/products/automotive/compute-subsystems/zena) for more details. + +**Project summary** + +Government and external regulations, particularly in automotive industries, are shaping how smart camera products are designed, implemented, and verified. This project will explore how compliance with standards like ISO 26262 influences the design of smart camera systems, such as Driver Monitoring Systems (DMS) to ensure driver attentiveness. Students or engineers will collaborate with industry-relevant platforms, such as Arm's Kronos Fixed Virtual Platform (FVP) for automotive development, to understand and propose frictionless developer experiences aligned with functional safety standards. + +Deliverables include: +- Developing a full end-to-end smart camera system that can be incorporated into a functional safety environment (such as automotive or medical), following the relevant standards for development (e.g., ISO 26262) +- A survey of regulatory requirements and their impact on smart camera design +- An architectural analysis integrating Arm-based systems into a compliant automotive software stack +- Recommendations for enhancing developer tools and reference software stacks to align with ISO standards + +## Estimated Project Duration +- Estimated Time: 6+ months +- Participants: Team of 2+ + +## Resources from Arm and Arm partners +- Learning Paths - [Automotive Development](https://www.arm.com/resources/learning-paths/automotive) +- Software - Arm Automotive Reference Platforms (e.g., [Kronos FVP](https://arm-auto-solutions.docs.arm.com/en/v1.0/overview.html)) + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
+ +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md b/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md new file mode 100644 index 00000000..4890eb01 --- /dev/null +++ b/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md @@ -0,0 +1,76 @@ +--- +title: FPGA-Accellerator-with-DDR +description: This self-service project takes Arm Corstone-1000 from FPGA to silicon, delivering a DDR-backed, Linux-ready SoC platform that lets researchers plug in and evaluate custom accelerators with real-world performance. +subjects: +- Virtual Hardware +- Performance and Architecture +requires-team: +- No +platform: +- IoT +- Embedded and Microcontrollers +sw-hw: +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + This project aims to leverage the Corstone-1000 platform to host and support a custom research accelerator. The main deliverables include prototyping the accelerator in FPGA and then creating a physical silicon implementation using a pre-verified programmable control system. The project will provide practical experience in SoC design, FPGA prototyping, and hardware acceleration. The final output will be a functional SoC FPGA prototyping platform with DDR memory, capable of running Linux and demonstrating the feasibility and performance of the design. **To undertake this project, please reach out to SoC Labs** in [this link](https://soclabs.org/). 
+ + + ## Prequisites + + - Languages: Verilog, SystemVerilog + - Tooling: Vivado, ModelSim, ASIC design tools + - Hardware: FPGA development board (e.g., Xilinx or Altera), Corstone-1000 platform + - IP access: Arm Academic Access member (link to get if they don't have it) + + ## Resources from Arm and our partners + + - External Community: [SoC Labs, community for Arm-based software development](https://soclabs.org/) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- +## Description +This project aims to leverage the Corstone-1000 platform to host and support a custom research accelerator. The main deliverables include prototyping the accelerator in FPGA and then creating a physical silicon implementation using a pre-verified programmable control system. The project will provide practical experience in SoC design, FPGA prototyping, and hardware acceleration. The final output will be a functional SoC FPGA prototyping platform with DDR memory, capable of running Linux and demonstrating the feasibility and performance of the design. **To undertake this project, please reach out to SoC Labs** in [this link](https://soclabs.org/). 
+ + +## Prequisites + +- Languages: Verilog, SystemVerilog +- Tooling: Vivado, ModelSim, ASIC design tools +- Hardware: FPGA development board (e.g., Xilinx or Altera), Corstone-1000 platform +- IP access: Arm Academic Access member (link to get if they don't have it) + +## Resources from Arm and our partners + +- External Community: [SoC Labs, community for Arm-based software development](https://soclabs.org/) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-HPC-Algorithm.md b/docs/_posts/2025-05-30-HPC-Algorithm.md new file mode 100644 index 00000000..3445c23e --- /dev/null +++ b/docs/_posts/2025-05-30-HPC-Algorithm.md @@ -0,0 +1,95 @@ +--- +title: HPC-Algorithm +description: This self-service project is around finding a HPC algorithm and accelerating it with Arm’s SVE/SVE2 vectorization—demonstrating how next-generation Arm hardware can deliver significant, scalable performance gains. 
+subjects: +- Performance and Architecture +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +badges: trending +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + **Why this is important?** + + Scalable Vector Extension (SVE) is a vector extension the A64 instruction set of the Armv8-A architecture. Armv9-A builds on SVE with the SVE2 extension. Unlike other single-instruction multiple data (SIMD) architectures, SVE and SVE2 do not define the size of the vector registers, but constrains it to a range of possible values, from a minimum of 128 bits up to a maximum of 2048 in 128-bit wide units. Therefore, any CPU vendor can implement the extension by choosing the vector register size that better suits the workloads the CPU is targeting. As of J there is growing availablity of SVE-enabled hardware, such as through cloud service providers. However, not all software has taken advantage of this feature. As such there is potential performance improvements available to software libraries and applications that add support for SVE/SVE2. + + **Project summary** + + This project aims to identify and optimize the performance of an algorithm used in high-performance computing (HPC) by leveraging Scalable Vector Extensions (SVE) instructions. The main deliverable is an optimized version of the chosen algorithm that demonstrates a performance improvements using SVE. This project will provide practical experience in HPC, vectorization, and performance optimization. The final output will be a detailed report and a functional implementation of the optimized algorithm. + + ## Prequisites + + - Intermediate undestanding of C, C++ or Fortran. + - Experience with high performance compute (HPC). 
 + - Basic understanding of compilers such as Arm Compiler for HPC, or autovectorising compiler such as GCC. + - Access to Arm-based servers or SVE-enabled hardware + + ## Resources from Arm and our partners + + - Learning path: [Port Code to SVE](https://learn.arm.com/learning-paths/servers-and-cloud-computing/sve/) + - Learning path: [Migrate applications that use performance libraries](https://learn.arm.com/learning-paths/servers-and-cloud-computing/using-and-porting-performance-libs/) + - Documentation: [SVE Programmers Guide](https://developer.arm.com/documentation/102476/0101/Programming-with-SVE) + + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- +## Description + +**Why this is important?** + +Scalable Vector Extension (SVE) is a vector extension to the A64 instruction set of the Armv8-A architecture. Armv9-A builds on SVE with the SVE2 extension. Unlike other single-instruction multiple data (SIMD) architectures, SVE and SVE2 do not define the size of the vector registers, but constrain it to a range of possible values, from a minimum of 128 bits up to a maximum of 2048 in 128-bit wide units.
 Therefore, any CPU vendor can implement the extension by choosing the vector register size that better suits the workloads the CPU is targeting. There is growing availability of SVE-enabled hardware, such as through cloud service providers. However, not all software has taken advantage of this feature. As such, there are potential performance improvements available to software libraries and applications that add support for SVE/SVE2. + +**Project summary** + +This project aims to identify and optimize the performance of an algorithm used in high-performance computing (HPC) by leveraging Scalable Vector Extensions (SVE) instructions. The main deliverable is an optimized version of the chosen algorithm that demonstrates a performance improvement using SVE. This project will provide practical experience in HPC, vectorization, and performance optimization. The final output will be a detailed report and a functional implementation of the optimized algorithm. + +## Prerequisites + +- Intermediate understanding of C, C++ or Fortran. +- Experience with high performance compute (HPC). +- Basic understanding of compilers such as Arm Compiler for HPC, or autovectorising compiler such as GCC. +- Access to Arm-based servers or SVE-enabled hardware + +## Resources from Arm and our partners + +- Learning path: [Port Code to SVE](https://learn.arm.com/learning-paths/servers-and-cloud-computing/sve/) +- Learning path: [Migrate applications that use performance libraries](https://learn.arm.com/learning-paths/servers-and-cloud-computing/using-and-porting-performance-libs/) +- Documentation: [SVE Programmers Guide](https://developer.arm.com/documentation/102476/0101/Programming-with-SVE) + + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program.
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md b/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md new file mode 100644 index 00000000..60ed1e4a --- /dev/null +++ b/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md @@ -0,0 +1,125 @@ +--- +title: Haskell-Compiler-Windows-on-Arm +description: This self-service project brings native Glasgow Haskell Compiler support to Windows on Arm—unlocking efficient Arm-laptop builds, extending Haskell’s reach, and giving contributors hands-on experience with Arm64 code generation and runtime integration. +subjects: +- Migration to Arm +- Performance and Architecture +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + + **Why this is important?** + + The Glasgow Haskell Compiler (GHC) is the de facto standard compiler for Haskell, an advanced purely functional programming language with strong type inference and lazy evaluation. 
This project aims to **port GHC to support Windows on Arm (WoA)**, a platform that is increasingly relevant with the rise of Arm-powered laptops and developer kits. Arm anticipates more original equipment manufacturers (OEMs) to be available in the coming years. + + + **Project summary** + + Currently, GHC lacks robust support for WoA, hindering Haskell’s reach in energy-efficient and mobile-native environments (Request for support has [previously been requested by the community](https://gitlab.haskell.org/ghc/ghc/-/issues/24603)). The goal is to bridge this gap by: + - Enabling native compilation of Haskell code via GHC on WoA. + - Implementing and testing architecture-specific assembly and intrinsic functions. + - Extending the GHC build system to recognize WoA environments. + - Integrating and validating linker and runtime support on Arm-based Windows systems. + + The project requires in-depth familiarity with compiler backends, calling conventions, code generation pipelines, and the use of LLVM or native code generators. Students will also gain experience in cross-compilation, Windows PE/COFF linking, and performance benchmarking on Arm CPUs. + + The work has potential for real-world deployment and academic publishing, and would be of high value to the Haskell and Arm developer ecosystems. 
+ + --- + + ## Prequisites + + - Advanced understanding of Haskell (including Template Haskell, Core-to-STG pipeline understanding) + - Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) + - Intemediate understanding of Arm64 Assembly (AArch64) + - Comfortable using compilers such as LLVM and Clang for backend work (if using LLVM codegen) + - Access to MSYS2 / CMake / Ninja for Windows builds + + + ## Resources from Arm and our partners + + - External Documentation: [GHC Development Wiki](https://gitlab.haskell.org/ghc/ghc/-/wikis/) + - Repository: [GHC source tree](https://gitlab.haskell.org/ghc/ghc) + - External Documentation: [Linaro WoA Support Documentation](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) + + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + + +## Description + +**Why this is important?** + +The Glasgow Haskell Compiler (GHC) is the de facto standard compiler for Haskell, an advanced purely functional programming language with strong type inference and lazy evaluation. 
This project aims to **port GHC to support Windows on Arm (WoA)**, a platform that is increasingly relevant with the rise of Arm-powered laptops and developer kits. Arm anticipates more original equipment manufacturers (OEMs) to be available in the coming years. + + +**Project summary** + +Currently, GHC lacks robust support for WoA, hindering Haskell’s reach in energy-efficient and mobile-native environments (Request for support has [previously been requested by the community](https://gitlab.haskell.org/ghc/ghc/-/issues/24603)). The goal is to bridge this gap by: +- Enabling native compilation of Haskell code via GHC on WoA. +- Implementing and testing architecture-specific assembly and intrinsic functions. +- Extending the GHC build system to recognize WoA environments. +- Integrating and validating linker and runtime support on Arm-based Windows systems. + +The project requires in-depth familiarity with compiler backends, calling conventions, code generation pipelines, and the use of LLVM or native code generators. Students will also gain experience in cross-compilation, Windows PE/COFF linking, and performance benchmarking on Arm CPUs. + +The work has potential for real-world deployment and academic publishing, and would be of high value to the Haskell and Arm developer ecosystems. 
 + +--- + +## Prerequisites + +- Advanced understanding of Haskell (including Template Haskell, Core-to-STG pipeline understanding) +- Arm64 Windows device or access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) +- Intermediate understanding of Arm64 Assembly (AArch64) +- Comfortable using compilers such as LLVM and Clang for backend work (if using LLVM codegen) +- Access to MSYS2 / CMake / Ninja for Windows builds + + +## Resources from Arm and our partners + +- External Documentation: [GHC Development Wiki](https://gitlab.haskell.org/ghc/ghc/-/wikis/) +- Repository: [GHC source tree](https://gitlab.haskell.org/ghc/ghc) +- External Documentation: [Linaro WoA Support Documentation](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) + + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-Human-Centric-Robotics.md b/docs/_posts/2025-05-30-Human-Centric-Robotics.md new file mode 100644 index 00000000..c06979f0 --- /dev/null +++ b/docs/_posts/2025-05-30-Human-Centric-Robotics.md @@ -0,0 +1,113 @@ +--- +title: Human-Centric-Robotics +description: This team project will build and test an Arm-based urban service robot—merging real-time navigation, vision-guided manipulation, and human interaction—and model its socioeconomic impact to show how Arm platforms can transform last-mile delivery, eldercare, or other city services. +subjects: +- ML +- Embedded Linux +- RTOS Fundamentals +requires-team: +- Yes +platform: +- Automotive +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +- Direct Support from Arm +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. + + ## Description + + **Why this is important?** + + Arm is transitioning from traditional IP to providing platforms for the AI era, please see a recent [news post](https://newsroom.arm.com/news/new-arm-product-naming-architecture) for more details. This project is crucial for Arm as it showcases the versatility of Arm-based platforms in real-world applications, enhancing their relevance in the robotics sector. Furthermore, the insights gained from this project can inform future developments and partnerships and share future platforms related to robotics. + + **Project Summary** + + This project challenges students to design, build, and evaluate a human-centric robotic system for urban deployment using Arm-based compute platforms such as Raspberry Pi 5, NVIDIA Jetson Orin Nano, etc. 
The primary focus is on deploying prototypes in a controlled campus environment for applications like last-mile delivery, eldercare assistance, or smart waste collection. + + + Participants will integrate real-time navigation, object manipulation, and human-interaction modules using state-of-the-art computer vision and sensor fusion frameworks. The second phase involves simulating or evaluating the robot’s impact on urban workflows and labor markets, including surveys or socioeconomic modeling techniques (e.g., system dynamics or agent-based simulation). + + Potential Deliverables include: + - A working prototype running on an Arm-based platform + - Software stack (navigation, ML inference, interaction logic) + - Field evaluation results & UX data (e.g., survey or usage logs) + - Report of development process and considerations when prototyping an end-user product. + - A socioeconomic impact report using modeling or simulation techniques + + *Note: Arm does not offer direct channels to municipalities or public testing environments. Projects should focus on campus deployments, simulated environments (e.g., Gazebo).* + + ## Estimated Project Duration + + 6+ months + Team size: 2+ participants + + ## Prerequisites + + - **Languages**: Familiarity with an OOP language. + - **Hardware**: + - **IP/Cloud Access**: + - Any cloud service provider with Arm-based instances (for model training or data analysis) + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+--- +Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. + +## Description + +**Why this is important?** + +Arm is transitioning from traditional IP to providing platforms for the AI era, please see a recent [news post](https://newsroom.arm.com/news/new-arm-product-naming-architecture) for more details. This project is crucial for Arm as it showcases the versatility of Arm-based platforms in real-world applications, enhancing their relevance in the robotics sector. Furthermore, the insights gained from this project can inform future developments and partnerships and share future platforms related to robotics. + +**Project Summary** + +This project challenges students to design, build, and evaluate a human-centric robotic system for urban deployment using Arm-based compute platforms such as Raspberry Pi 5, NVIDIA Jetson Orin Nano, etc. The primary focus is on deploying prototypes in a controlled campus environment for applications like last-mile delivery, eldercare assistance, or smart waste collection. + + +Participants will integrate real-time navigation, object manipulation, and human-interaction modules using state-of-the-art computer vision and sensor fusion frameworks. The second phase involves simulating or evaluating the robot’s impact on urban workflows and labor markets, including surveys or socioeconomic modeling techniques (e.g., system dynamics or agent-based simulation). + +Potential Deliverables include: +- A working prototype running on an Arm-based platform +- Software stack (navigation, ML inference, interaction logic) +- Field evaluation results & UX data (e.g., survey or usage logs) +- Report of development process and considerations when prototyping an end-user product. +- A socioeconomic impact report using modeling or simulation techniques + +*Note: Arm does not offer direct channels to municipalities or public testing environments. 
Projects should focus on campus deployments, simulated environments (e.g., Gazebo).* + +## Estimated Project Duration + +6+ months +Team size: 2+ participants + +## Prerequisites + +- **Languages**: Familiarity with an OOP language. +- **Hardware**: +- **IP/Cloud Access**: + - Any cloud service provider with Arm-based instances (for model training or data analysis) + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md b/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md new file mode 100644 index 00000000..7c87bffe --- /dev/null +++ b/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md @@ -0,0 +1,79 @@ +--- +title: LLM-Benchmark-on-Arm-Server +description: This self-service project sets up a reproducible MLPerf Inference workflow to benchmark large-language-model performance across Arm server configurations—yielding hard data that guides optimization of Arm hardware and software stacks for AI workloads. +subjects: +- ML +- Performance and Architecture +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + This project aims to benchmark inference on Arm-based servers using the MLPerf Inference benchmark suite. 
The project spans performance analysis across different configurations of Arm-based servers. The main deliverable is a comprehensive benchmarking setup that can evaluate the performance of large language models (LLMs) on various Arm server configurations in addition to a report highlighting the performance difference and how to recreate the results. This project will provide practical experience in benchmarking, performance analysis, and working with Arm-based server architectures. The final output will be a detailed report and a functional benchmarking infrastructure that can be used for further research and development. + + + ## Prerequisites + + - Intermediate understanding of Python and C++ + - Intermediate understanding of ML frameworks such as MLPerf, TensorFlow and PyTorch + - Access to physical Arm-based server or access to cloud service providers + + ## Resources from Arm and our partners + + - Repository: [MLPerf Inference ](https://github.com/mlcommons/inference) + - External Documentation: [MLPerf Inference Benchmark Suite](https://mlcommons.org/en/inference-datacenter-20/) + - Blog: [Arm Server inference performance](https://community.arm.com/arm-community-blogs/b/servers-and-cloud-computing-blog/posts/machine-learning-inference-on-aws-graviton3) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- +## Description +This project aims to benchmark inference on Arm-based servers using the MLPerf Inference benchmark suite. The project spans performance analysis across different configurations of Arm-based servers. The main deliverable is a comprehensive benchmarking setup that can evaluate the performance of large language models (LLMs) on various Arm server configurations in addition to a report highlighting the performance difference and how to recreate the results. This project will provide practical experience in benchmarking, performance analysis, and working with Arm-based server architectures. The final output will be a detailed report and a functional benchmarking infrastructure that can be used for further research and development. + + +## Prequisites + +- Intermediate understanding of Python and C++ +- Intemediate understanding of ML frameworks such as MLPerf, TensorFlow and PyTorch +- Access to physcial Arm-based server or access to cloud service providers + +## Resources from Arm and our partners + +- Repository: [MLPerf Inference ](https://github.com/mlcommons/inference) +- External Documentation: [MLPerf Inference Benchmark Suite](https://mlcommons.org/en/inference-datacenter-20/) +- Blog: [Arm Server inference performance](https://community.arm.com/arm-community-blogs/b/servers-and-cloud-computing-blog/posts/machine-learning-inference-on-aws-graviton3) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md b/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md new file mode 100644 index 00000000..5aedb7ef --- /dev/null +++ b/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md @@ -0,0 +1,111 @@ +--- +title: Machine-Learning-on-AWS-Graviton +description: This self-service project ports and tunes OpenSora text-to-video transformers on AWS Graviton CPUs—showcasing cost-efficient, quantized, CPU-only inference pipelines and guiding best-practice optimization for Arm-based cloud AI workloads. +subjects: +- ML +- Migration to Arm +- Performance and Architecture +requires-team: +- No +platform: +- Servers and Cloud Computing +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + ## Description + + **Why is this important?** + + This project investigates the deployment and optimization of text-to-video transformer models on Arm-based instances, leveraging CPU-only execution for cost-effective and scalable inference. Vision Transformers, though typically run on GPUs, are increasingly desire to operate in resource-constrained environments for power efficiency. + + + **Project Summary** + + The aim of this project is to port, benchmark, and optimize a pre-trained ViT model (e.g., OpenSora) on Arm-based instances. 
This could include post-training quantization and investigation in how to speed up performance. Students will explore efficiency techniques such as INT8 quantization, refactoring of expensive operations, and memory-efficient transformer kernels, and compare results across GPU and CPU platforms. Deliverables include a reproducable inference pipeline and a technical report outlining bottlenecks and optimization strategies. + + ## Prequisites + + - Intemediate understanding of Python. + - Understanding of transformer architectures, vision transformer architectures and inference optimization + - Experience using PyTorch or ONNX Runtime (CPU execution provider) + - Experience with libraries such as Hugging Face Transformers, torchvision + - Access to Arm-based instances such as AWS Graviton3/Graviton4 (`c7g`, `m7g`, or `r7g`) + - Familiarity with Linux, Docker, and cloud environments + + + ## Resources from Arm and our partners + + + - Learning Paths: [Arm AI Learning Paths](https://learn.arm.com/tag/ml) + - Repository: [AWS Machine Learning Guide](https://github.com/aws/aws-graviton-getting-started/tree/main/machinelearning) + - Blog: [AWS SageMaker](https://aws.amazon.com/blogs/machine-learning/run-machine-learning-inference-workloads-on-aws-graviton-based-instances-with-amazon-sagemaker/) + - External Documentation: [OpenSora Documentation](https://github.com/hpcaitech/Open-Sora) + - Repository: [GGML library](https://github.com/ggml-org/ggml) + + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
+ + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + +## Description + +**Why is this important?** + +This project investigates the deployment and optimization of text-to-video transformer models on Arm-based instances, leveraging CPU-only execution for cost-effective and scalable inference. Vision Transformers, though typically run on GPUs, are increasingly desired to operate in resource-constrained environments for power efficiency. + + +**Project Summary** + +The aim of this project is to port, benchmark, and optimize a pre-trained ViT model (e.g., OpenSora) on Arm-based instances. This could include post-training quantization and investigation in how to speed up performance. Students will explore efficiency techniques such as INT8 quantization, refactoring of expensive operations, and memory-efficient transformer kernels, and compare results across GPU and CPU platforms. Deliverables include a reproducible inference pipeline and a technical report outlining bottlenecks and optimization strategies. + +## Prerequisites + +- Intermediate understanding of Python. 
+- Understanding of transformer architectures, vision transformer architectures and inference optimization +- Experience using PyTorch or ONNX Runtime (CPU execution provider) +- Experience with libraries such as Hugging Face Transformers, torchvision +- Access to Arm-based instances such as AWS Graviton3/Graviton4 (`c7g`, `m7g`, or `r7g`) +- Familiarity with Linux, Docker, and cloud environments + + +## Resources from Arm and our partners + + +- Learning Paths: [Arm AI Learning Paths](https://learn.arm.com/tag/ml) +- Repository: [AWS Machine Learning Guide](https://github.com/aws/aws-graviton-getting-started/tree/main/machinelearning) +- Blog: [AWS SageMaker](https://aws.amazon.com/blogs/machine-learning/run-machine-learning-inference-workloads-on-aws-graviton-based-instances-with-amazon-sagemaker/) +- External Documentation: [OpenSora Documentation](https://github.com/hpcaitech/Open-Sora) +- Repository: [GGML library](https://github.com/ggml-org/ggml) + + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md b/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md new file mode 100644 index 00000000..9cc92a5b --- /dev/null +++ b/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md @@ -0,0 +1,121 @@ +--- +title: Processor-in-the-Loop-Automotive +description: Verify a Simulink automotive controller by running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor. +subjects: +- Embedded Linux +- RTOS Fundamentals +- Virtual Hardware +requires-team: +- No +platform: +- Laptops and Desktops +- Automotive +- Embedded and Microcontrollers +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + + **Why this is important** + + Modern automotive software development requires early verification of embedded software to meet performance and safety standards. Engineers must validate control algorithms long before physical ECUs are available. Processor in the Loop (PIL) verification bridges this gap by executing auto-generated embedded C code on a virtual processor while checking for functional equivalence and timing compliance. This challenge mirrors the real-world V-Model verification process used by OEMs and Tier 1 suppliers, offering hands-on experience in model-based design, embedded code generation, and in-the-loop testing on a virtual Arm Cortex M7 core.Modern vehicles demand rigorous, early-stage verification of embedded software to satisfy safety, performance and homologation requirements. Waiting for physical ECUs delays feedback and drives up cost; virtual processor testing closes this gap. + + **Project summary** + + Start with a prebuilt Simulink automotive control model and drive it through a complete model-based development and verification workflow. 
This includes defining detailed software requirements, designing test scenarios, generating C code from the controller subsystem, running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor, analyzing execution time, and publishing a complete verification report. + + ## Prequisites + + - [MATLAB & Simulink License](https://uk.mathworks.com/pricing-licensing.html?prodcode=ML&intendeduse=edu) + - Familiarity with C/C++, Simulink, Stateflow and Embedded Coder + - Familiarity with Processor-in-the-Loop (PIL), Code Profile Analyzer + - Understanding of automotive software development such as V-Model lifecycle methodology. + + + ## Resources from Arm and our partners + + - Built-in Simulink Automotive Example: [Automatic Climate Control](https://www.mathworks.com/help/simulink/slref/simulating-automatic-climate-control-systems.html) + - Built-in Simulink Automotive Example: [Tire Pressure Monitoring System (TPMS)]( https://www.mathworks.com/help/simulink/ug/wirelesss-tire-pressure-monitoring-system-with-fault-logging.html) + - Built-in Simulink Automotive Example: [Anti-Lock Braking System (ABS)]( https://www.mathworks.com/help/simulink/slref/modeling-an-anti-lock-braking-system.html) + - Define Requirements: [Requirements Toolbox™](https://www.mathworks.com/products/requirements-toolbox.html) + - Code Generation: [MathWorks Embedded Coder®](https://uk.mathworks.com/products/embedded-coder.html) + - Model-in-the-Loop Test: [Simulink Test™](https://www.mathworks.com/help/sltest/index.html?s_tid=CRUX_lftnav) + - Measure Code Coverage: [Simulink Coverage™](https://www.mathworks.com/help/slcoverage/index.html) + - Hardware Implementation: [Arm Cortex M7 (Fast Model)](https://developer.arm.com/Tools%20and%20Software/Fast%20Models), [Embedded Coder Support Package for Arm Cortex M Fast Models]( https://www.mathworks.com/hardware-support/arm-cortex-m.html) + - Conduct Execution Profiling: [Code Profile 
Analyzer](https://www.mathworks.com/help/ecoder/ref/codeprofileanalyzer-app.html) + - Perform Static Code Analysis: [Polyspace®](https://www.mathworks.com/products/polyspace.html) + - Documentation: [MATLAB and Simulink for Verification and Validation](https://www.mathworks.com/solutions/verification-validation.html) + - Documentation: [ARM Cortex-M Support from Embedded Coder]( https://www.mathworks.com/hardware-support/arm-cortex-m.html) + - Documentation: [Arm Fast Models](https://uk.mathworks.com/products/connections/product_detail/arm-fast-models.html) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + + +## Description + +**Why this is important** + +Modern automotive software development requires early verification of embedded software to meet performance and safety standards. Engineers must validate control algorithms long before physical ECUs are available. Processor in the Loop (PIL) verification bridges this gap by executing auto-generated embedded C code on a virtual processor while checking for functional equivalence and timing compliance. 
This challenge mirrors the real-world V-Model verification process used by OEMs and Tier 1 suppliers, offering hands-on experience in model-based design, embedded code generation, and in-the-loop testing on a virtual Arm Cortex M7 core. Modern vehicles demand rigorous, early-stage verification of embedded software to satisfy safety, performance and homologation requirements. Waiting for physical ECUs delays feedback and drives up cost; virtual processor testing closes this gap. + +**Project summary** + +Start with a prebuilt Simulink automotive control model and drive it through a complete model-based development and verification workflow. This includes defining detailed software requirements, designing test scenarios, generating C code from the controller subsystem, running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor, analyzing execution time, and publishing a complete verification report. + +## Prerequisites + +- [MATLAB & Simulink License](https://uk.mathworks.com/pricing-licensing.html?prodcode=ML&intendeduse=edu) +- Familiarity with C/C++, Simulink, Stateflow and Embedded Coder +- Familiarity with Processor-in-the-Loop (PIL), Code Profile Analyzer +- Understanding of automotive software development such as V-Model lifecycle methodology. 
+ + +## Resources from Arm and our partners + +- Built-in Simulink Automotive Example: [Automatic Climate Control](https://www.mathworks.com/help/simulink/slref/simulating-automatic-climate-control-systems.html) +- Built-in Simulink Automotive Example: [Tire Pressure Monitoring System (TPMS)]( https://www.mathworks.com/help/simulink/ug/wirelesss-tire-pressure-monitoring-system-with-fault-logging.html) +- Built-in Simulink Automotive Example: [Anti-Lock Braking System (ABS)]( https://www.mathworks.com/help/simulink/slref/modeling-an-anti-lock-braking-system.html) +- Define Requirements: [Requirements Toolbox™](https://www.mathworks.com/products/requirements-toolbox.html) +- Code Generation: [MathWorks Embedded Coder®](https://uk.mathworks.com/products/embedded-coder.html) +- Model-in-the-Loop Test: [Simulink Test™](https://www.mathworks.com/help/sltest/index.html?s_tid=CRUX_lftnav) +- Measure Code Coverage: [Simulink Coverage™](https://www.mathworks.com/help/slcoverage/index.html) +- Hardware Implementation: [Arm Cortex M7 (Fast Model)](https://developer.arm.com/Tools%20and%20Software/Fast%20Models), [Embedded Coder Support Package for Arm Cortex M Fast Models]( https://www.mathworks.com/hardware-support/arm-cortex-m.html) +- Conduct Execution Profiling: [Code Profile Analyzer](https://www.mathworks.com/help/ecoder/ref/codeprofileanalyzer-app.html) +- Perform Static Code Analysis: [Polyspace®](https://www.mathworks.com/products/polyspace.html) +- Documentation: [MATLAB and Simulink for Verification and Validation](https://www.mathworks.com/solutions/verification-validation.html) +- Documentation: [ARM Cortex-M Support from Embedded Coder]( https://www.mathworks.com/hardware-support/arm-cortex-m.html) +- Documentation: [Arm Fast Models](https://uk.mathworks.com/products/connections/product_detail/arm-fast-models.html) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are 
part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Quantisation-Aware-Training.md b/docs/_posts/2025-05-30-Quantisation-Aware-Training.md new file mode 100644 index 00000000..66cfd281 --- /dev/null +++ b/docs/_posts/2025-05-30-Quantisation-Aware-Training.md @@ -0,0 +1,106 @@ +--- +title: Quantisation-Aware-Training +description: This self-service project applies PyTorch quantization-aware training to compress and accelerate vision models for Arm-powered Android devices—enabling real-time, on-device AI while sharing the resulting lightweight models with the Hugging Face community. +subjects: +- ML +- Performance and Architecture +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- Mobile, Graphics, and Gaming +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + This open-ended project invites students to explore **Quantization-Aware Training (QAT)** with PyTorch to optimize computer vision models for **Arm-based mobile devices** (e.g., Android smartphones). 
+ + The project centers on training a model using a **non-restrictively licensed dataset** and deploying it either on **Arm-powered mobile devices** (leveraging Android Neural Networks API ) + + Students will apply QAT to maintain accuracy while reducing model size and inference latency, making it suitable for real-time applications like: + - Sign language recognition for accessibility. + - Visual anomaly detection in manufacturing. + - Personal health and activity monitoring from camera feeds. + + The project encourages referencing work by contributing **optimized and quantized models for Arm platforms** on HuggingFace. The final quantized model will be **uploaded to HuggingFace** and may be submitted for listing in the [Arm on HuggingFace space](https://huggingface.co/Arm), encouraging open, community-supported contributions. + + ## Prequisites + + - **Languages**: Familiar with Python, pytorch and Java/Kotlin (if Android). + - **Frameworks**: Intermediate understanding of PyTorch + - **Tooling**: PyTorch Lightning, Android Studio + - **Hardware Options**: + - Android phone with Arm Cortex-A CPU or simulator through Android Studio. + - **Deployment Targets**: + - Android + + ## Resources from Arm and our partners + + - Documentation: [Quantization in PyTorch](https://pytorch.org/docs/stable/quantization.html) + - Blog: [Google Media Pipe](https://android-developers.googleblog.com/2024/10/bring-your-ai-model-to-android-devices.html) + - Datasets: [Hugging Face Datasets](https://huggingface.co/docs/datasets/en/index) + - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). 
+ + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- +## Description + +This open-ended project invites students to explore **Quantization-Aware Training (QAT)** with PyTorch to optimize computer vision models for **Arm-based mobile devices** (e.g., Android smartphones). + +The project centers on training a model using a **non-restrictively licensed dataset** and deploying it either on **Arm-powered mobile devices** (leveraging Android Neural Networks API ) + +Students will apply QAT to maintain accuracy while reducing model size and inference latency, making it suitable for real-time applications like: +- Sign language recognition for accessibility. +- Visual anomaly detection in manufacturing. +- Personal health and activity monitoring from camera feeds. + +The project encourages referencing work by contributing **optimized and quantized models for Arm platforms** on HuggingFace. The final quantized model will be **uploaded to HuggingFace** and may be submitted for listing in the [Arm on HuggingFace space](https://huggingface.co/Arm), encouraging open, community-supported contributions. + +## Prequisites + +- **Languages**: Familiar with Python, pytorch and Java/Kotlin (if Android). +- **Frameworks**: Intermediate understanding of PyTorch +- **Tooling**: PyTorch Lightning, Android Studio +- **Hardware Options**: + - Android phone with Arm Cortex-A CPU or simulator through Android Studio. 
+- **Deployment Targets**: + - Android + +## Resources from Arm and our partners + +- Documentation: [Quantization in PyTorch](https://pytorch.org/docs/stable/quantization.html) +- Blog: [Google Media Pipe](https://android-developers.googleblog.com/2024/10/bring-your-ai-model-to-android-devices.html) +- Datasets: [Hugging Face Datasets](https://huggingface.co/docs/datasets/en/index) +- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-R-Arm-Community-Support.md b/docs/_posts/2025-05-30-R-Arm-Community-Support.md new file mode 100644 index 00000000..3061387b --- /dev/null +++ b/docs/_posts/2025-05-30-R-Arm-Community-Support.md @@ -0,0 +1,140 @@ +--- +title: R-Arm-Community-Support +description: This self-service project boosts the R ecosystem on Windows on Arm by identifying unsupported packages, upstreaming fixes, and automating builds—so data scientists can run their workflows natively on fast, efficient Arm64 laptops and desktops. 
+subjects: +- Performance and Architecture +- Migration to Arm +- Libraries +requires-team: +- No +platform: +- Laptops and Desktops +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + + **Why this is important?** + + Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, developers will expect packages to be available for WoA so that downstream applications can more easily build for WoA platforms. + + **Project summary** + + + This project aims to significantly enhance the support for running **R packages on Windows 11 for Arm64 (WoA)** by identifying and contributing bug fixes / improvements to the relevant parts of the R community (e.g., CRAN/Bioconductor packages or even R Core / Rtools etc.). The project’s goals include: + + + - **Identifying CRAN and Bioconductor packages** lacking Windows/Arm64 support. + - **Proposing and testing patches upstream** for R packages that fail to build or run on WoA. + - **Engaging with the R development community** via the Windows Special interest group, [R-SIG-windows](https://stat.ethz.ch/mailman/listinfo/r-sig-windows), [R-Package-Devel](https://stat.ethz.ch/mailman/listinfo/r-package-devel) mailing list or the informal [R Contributors Slack](https://contributor.r-project.org/slack) and following the [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) to submit high-quality patches. 
+ - **Reporting new issues**, requesting comments on proposed patches, documenting process via Bugzilla and reviewing existing issues through the [bug tracker](https://bugs.r-project.org/) + - **Tracking CI coverage** for recently announced public preview of [Windows11-Arm64 GitHub-hosted runners](https://github.blog/changelog/2025-04-14-windows-arm64-hosted-runners-now-available-in-public-preview/) and potentially proposing GitHub Actions or GitLab CI templates to automate WoA builds. + + Stretch Objectives: + + - **Identifying, Analyzing and fixing compatibility issues** in base R and Rtools for the Windows/Arm64 environment. This may involve waiting for improved upstream support from GCC for Windows-AArch64. A summary and progress is [available here](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/28802842658/MinGW+GNU+Toolchain). + + The deliverables include: + + - Patches, request for comments and bug reports the highest impact packages + - A curated list of packages with proposed WoA support status + - A short technical write-up describing the contributions and challenges + + ## Prequisites + + - Intermediate understanding of the R language + - Intermediate understanding of Rtools, Git and Docker for cross-compilation. + - Basic understanding or willingness to learn Bugzilla, Windows 11 operating system and GitHub CI/CD. + - Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments). 
+ + ## Resources from Arm and our partners + + - Documentation: [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) + - Documentation: [R Bugzilla](https://bugs.r-project.org/) + - Documentation: [Rtools for Windows](https://cran.r-project.org/bin/windows/Rtools/) + - Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/) + - Documentation: Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors , who are part of the Arm Developer program and the R community. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + + +## Description + +**Why this is important?** + +Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, developers will expect packages to be available for WoA so that downstream applications can more easily build for WoA platforms. 
+ +**Project summary** + + +This project aims to significantly enhance the support for running **R packages on Windows 11 for Arm64 (WoA)** by identifying and contributing bug fixes / improvements to the relevant parts of the R community (e.g., CRAN/Bioconductor packages or even R Core / Rtools etc.). The project’s goals include: + + +- **Identifying CRAN and Bioconductor packages** lacking Windows/Arm64 support. +- **Proposing and testing patches upstream** for R packages that fail to build or run on WoA. +- **Engaging with the R development community** via the Windows Special interest group, [R-SIG-windows](https://stat.ethz.ch/mailman/listinfo/r-sig-windows), [R-Package-Devel](https://stat.ethz.ch/mailman/listinfo/r-package-devel) mailing list or the informal [R Contributors Slack](https://contributor.r-project.org/slack) and following the [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) to submit high-quality patches. +- **Reporting new issues**, requesting comments on proposed patches, documenting process via Bugzilla and reviewing existing issues through the [bug tracker](https://bugs.r-project.org/) +- **Tracking CI coverage** for recently announced public preview of [Windows11-Arm64 GitHub-hosted runners](https://github.blog/changelog/2025-04-14-windows-arm64-hosted-runners-now-available-in-public-preview/) and potentially proposing GitHub Actions or GitLab CI templates to automate WoA builds. + +Stretch Objectives: + +- **Identifying, Analyzing and fixing compatibility issues** in base R and Rtools for the Windows/Arm64 environment. This may involve waiting for improved upstream support from GCC for Windows-AArch64. A summary and progress is [available here](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/28802842658/MinGW+GNU+Toolchain). 
+ +The deliverables include: + +- Patches, requests for comments and bug reports for the highest impact packages +- A curated list of packages with proposed WoA support status +- A short technical write-up describing the contributions and challenges + +## Prerequisites + +- Intermediate understanding of the R language +- Intermediate understanding of Rtools, Git and Docker for cross-compilation. +- Basic understanding or willingness to learn Bugzilla, Windows 11 operating system and GitHub CI/CD. +- Arm64 Windows device or access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments). + +## Resources from Arm and our partners + +- Documentation: [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) +- Documentation: [R Bugzilla](https://bugs.r-project.org/) +- Documentation: [Rtools for Windows](https://cran.r-project.org/bin/windows/Rtools/) +- Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/) +- Documentation: Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program and the R community. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD).
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Real-Time-Image-Classification.md b/docs/_posts/2025-05-30-Real-Time-Image-Classification.md new file mode 100644 index 00000000..19478c90 --- /dev/null +++ b/docs/_posts/2025-05-30-Real-Time-Image-Classification.md @@ -0,0 +1,98 @@ +--- +title: Real-Time-Image-Classification +description: This self-service project trains, quantizes, and CMSIS-NN-deploys a CNN to achieve real-time image classification on an Arm Cortex-M board—demonstrating low-power, edge-ready AI on microcontrollers. +subjects: +- ML +- Performance and Architecture +requires-team: +- No +platform: +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + This project aims to develop a real-time image classification system using Convolutional Neural Networks (CNN) on an Arm Cortex-M microcontroller with CMSIS-NN. The main deliverables include training a CNN model on a custom dataset, quantizing the model to deploy it on resource-constrained devices, and transforming the model into a C format to compile and run on the microcontroller. The project will provide practical experience in running AI models on edge devices and optimizing AI models for efficient performance. The final output will be a functional image classification system capable of real-time processing on a microcontroller. + + + ## Prequisites + + - Languages: Python, ML framework experience (TensorFlowLite for microcontroller or Pytorch / Executorch), Embedded programming in C. 
+ - Tooling: + - TensorFlow Lite + - CMSIS-NN + - Keil MDK + - Hardware: + - Arm Cortex-M based microcontroller development board and compatible camera module. + - Access to hardware suitable for training neural networks + + ## Resources from Arm and our partners + + - Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) + - Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) + - Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) + - Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + + +## Description +This project aims to develop a real-time image classification system using Convolutional Neural Networks (CNN) on an Arm Cortex-M microcontroller with CMSIS-NN. The main deliverables include training a CNN model on a custom dataset, quantizing the model to deploy it on resource-constrained devices, and transforming the model into a C format to compile and run on the microcontroller. 
The project will provide practical experience in running AI models on edge devices and optimizing AI models for efficient performance. The final output will be a functional image classification system capable of real-time processing on a microcontroller. + + +## Prequisites + +- Languages: Python, ML framework experience (TensorFlowLite for microcontroller or Pytorch / Executorch), Embedded programming in C. +- Tooling: + - TensorFlow Lite + - CMSIS-NN + - Keil MDK +- Hardware: + - Arm Cortex-M based microcontroller development board and compatible camera module. + - Access to hardware suitable for training neural networks + +## Resources from Arm and our partners + +- Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) +- Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) +- Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) +- Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md b/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md new file mode 100644 index 00000000..f356ec62 --- /dev/null +++ b/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md @@ -0,0 +1,144 @@ +--- +title: Responsible-AI-and-Yellow-Teaming +description: This self-service project equips teams with a YellowTeamGPT workflow that probes Arm-based AI products for unintended impacts—turning responsible-AI stress-testing into a core step of the development cycle. +subjects: +- ML +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + + **Why this is important?** + + AI products are becoming more capable by the day. But unless we think carefully about how we build AI systems, we risk amplifying harm as fast as we’re scaling performance. Even AI products that seem successful in narrow metrics like `number of users` and `engagement` can lead to a degradation of user trust in your company, reputational risk, and drive negative societal outcomes. A more systematic product development approach is necessary for this next generation of tech to ensure we get the benefits of AI without the downsides. + + **Project summary** + + This project introduces "Yellow Teaming", a structured methodology for stress-testing AI products by exploring the full spectrum of consequences when products succeed, not just fail. Students will create a YellowTeamGPT to analyze their Arm-based product concept and apply the learnings to make their product better. 
This exercise is an excellent way for software developers, product managers, and designers to elevate their products through thoughtful design choices above a crowded competitive landscape. + + The assistant will be integrated into your product development workflow (e.g. product brainstorming, feature planning reviews, coding sessions) to aid software teams in surfacing unintended effects of new product features on your company, your users, and society. Participants can implement their YellowTeamGPT using any LLM, from a private Llama3.1-8B model on an AWS instance (tutorial linked below) to a public ChatGPT/Claude/other chatbot. Participants can also Yellow Team without an LLM by applying the methodology themselves and documenting their analysis. Analysis can be documented as product design documents, sprint retrospectives, Git-based code reviews...anything that shows the results of their thoughtful design practices. + + Key Objectives of Your Project + - Collect Real-World Applications: Gather detailed accounts of AI projects developed using Yellow Teaming principles on Arm-based systems. + - Showcase Responsible AI Practices: Highlight how developers anticipate and address potential societal and ethical impacts of their AI solutions. + - Promote Arm-Based AI Development: Demonstrate the capabilities and advantages of deploying AI applications on Arm architectures, such as AWS Graviton processors or smartphones. + - Yellow Teaming Implementation: A detailed account of how Yellow Teaming was applied, including the tools/prompts used to facilitate analysis, identification of unintended consequences, strategies to mitigate negative product impacts, and/or new net-positive features ideated. + + + ## Prequisites + + If deploying a private Llama model -> + - **Hardware**: + - Access to an Arm-based cloud instance, for example Arm-based Graviton4 processors. 
+ - **Software**: + - PyTorch and Hugging Face account + - `torchchat` repo and dependencies + - Hugging Face CLI for LLM download + - Git, Python 3.10+, and various common build essentials (e.g., `make`, `g++`) + - **Skills**: + - Proficiency in Python and PyTorch + - [Hugging Face account](https://huggingface.co/) + - Understanding of LLMs and prompting techniques + + If using a public LLM -> + - **Hardware**: + - None needed + - **Software**: + - Access to a public LLM + - **Skills**: + - Understanding of LLMs and prompting techniques + + ## Resources from Arm and our partners + + - External Course: [Mitigating Harmful Consequences course module by the Center for Humane Technology](https://www.humanetech.com/course) + - Blog: [Build Responsible AI products with your own Yellow Teaming LLM](https://pytorch.org/blog/build-responsible-ai-products-with-your-own-yellow-teaming-llm/) + - Learning Paths: [AI Learning Paths](https://learn.arm.com/tag/ml) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- +## Description + + +**Why this is important?** + +AI products are becoming more capable by the day. 
But unless we think carefully about how we build AI systems, we risk amplifying harm as fast as we’re scaling performance. Even AI products that seem successful in narrow metrics like `number of users` and `engagement` can lead to a degradation of user trust in your company, reputational risk, and drive negative societal outcomes. A more systematic product development approach is necessary for this next generation of tech to ensure we get the benefits of AI without the downsides. + +**Project summary** + +This project introduces "Yellow Teaming", a structured methodology for stress-testing AI products by exploring the full spectrum of consequences when products succeed, not just fail. Students will create a YellowTeamGPT to analyze their Arm-based product concept and apply the learnings to make their product better. This exercise is an excellent way for software developers, product managers, and designers to elevate their products through thoughtful design choices above a crowded competitive landscape. + +The assistant will be integrated into your product development workflow (e.g. product brainstorming, feature planning reviews, coding sessions) to aid software teams in surfacing unintended effects of new product features on your company, your users, and society. Participants can implement their YellowTeamGPT using any LLM, from a private Llama3.1-8B model on an AWS instance (tutorial linked below) to a public ChatGPT/Claude/other chatbot. Participants can also Yellow Team without an LLM by applying the methodology themselves and documenting their analysis. Analysis can be documented as product design documents, sprint retrospectives, Git-based code reviews...anything that shows the results of their thoughtful design practices. + +Key Objectives of Your Project +- Collect Real-World Applications: Gather detailed accounts of AI projects developed using Yellow Teaming principles on Arm-based systems. 
+- Showcase Responsible AI Practices: Highlight how developers anticipate and address potential societal and ethical impacts of their AI solutions. +- Promote Arm-Based AI Development: Demonstrate the capabilities and advantages of deploying AI applications on Arm architectures, such as AWS Graviton processors or smartphones. +- Yellow Teaming Implementation: A detailed account of how Yellow Teaming was applied, including the tools/prompts used to facilitate analysis, identification of unintended consequences, strategies to mitigate negative product impacts, and/or new net-positive features ideated. + + +## Prequisites + +If deploying a private Llama model -> +- **Hardware**: + - Access to an Arm-based cloud instance, for example Arm-based Graviton4 processors. +- **Software**: + - PyTorch and Hugging Face account + - `torchchat` repo and dependencies + - Hugging Face CLI for LLM download + - Git, Python 3.10+, and various common build essentials (e.g., `make`, `g++`) +- **Skills**: + - Proficiency in Python and PyTorch + - [Hugging Face account](https://huggingface.co/) + - Understanding of LLMs and prompting techniques + +If using a public LLM -> +- **Hardware**: + - None needed +- **Software**: + - Access to a public LLM +- **Skills**: + - Understanding of LLMs and prompting techniques + +## Resources from Arm and our partners + +- External Course: [Mitigating Harmful Consequences course module by the Center for Humane Technology](https://www.humanetech.com/course) +- Blog: [Build Responsible AI products with your own Yellow Teaming LLM](https://pytorch.org/blog/build-responsible-ai-products-with-your-own-yellow-teaming-llm/) +- Learning Paths: [AI Learning Paths](https://learn.arm.com/tag/ml) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md b/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md new file mode 100644 index 00000000..1f910af8 --- /dev/null +++ b/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md @@ -0,0 +1,85 @@ +--- +title: Sentiment-Analysis-Dashboard +description: This self-service project builds a web-scraping, LLM-powered dashboard that tracks and visualizes sentiment trends across semiconductor-industry news, giving stakeholders a real-time pulse on market mood and emerging themes. +subjects: +- ML +- Web +- Databases +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- Mobile, Graphics, and Gaming +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + This project aims to develop a sentiment analysis dashboard for keywords related to the semiconductor industry. The main deliverable is a web scraping script that gathers text data from various semiconductor news sites. 
Example sites are [SemiconductorEngineering.com](https://semiengineering.com/), [IEEE Spectrum](https://spectrum.ieee.org/), [EETimes](https://www.eetimes.com/tag/semiconductors/), [SemiconductorDigest](https://www.semiconductor-digest.com/), [SemiconductorToday](https://semiconductor-today.com/), [Financial Times - Semiconductors](https://www.ft.com/semiconductors), [Bezinga Semiconductors](https://www.benzinga.com/topic/semiconductors). + + This data will then be processed through a sentiment analysis LLM (Large Language Model) to determine the sentiment of the content and how it varies over time. The project will provide practical experience in web scraping, data processing, databases and using LLMs for sentiment analysis. The final output will be a functional dashboard that displays the sentiment analysis results in an easy-to-understand format. + + ## Prequisites + + - Languages: Intermediate understanding of Python + - Hardware: Access to a computer with internet connectivity and access to cloud instances + + ## Resources from Arm and our partners + + You are free to choose your own implementation details. The resouces below are examples to get started. + + - External Documentation: [BeautifulSoup](https://pypi.org/project/beautifulsoup4/) + - Learning Paths: [Deploy MariaDB](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/) + + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- +## Description +This project aims to develop a sentiment analysis dashboard for keywords related to the semiconductor industry. The main deliverable is a web scraping script that gathers text data from various semiconductor news sites. Example sites are [SemiconductorEngineering.com](https://semiengineering.com/), [IEEE Spectrum](https://spectrum.ieee.org/), [EETimes](https://www.eetimes.com/tag/semiconductors/), [SemiconductorDigest](https://www.semiconductor-digest.com/), [SemiconductorToday](https://semiconductor-today.com/), [Financial Times - Semiconductors](https://www.ft.com/semiconductors), [Benzinga Semiconductors](https://www.benzinga.com/topic/semiconductors). + +This data will then be processed through a sentiment analysis LLM (Large Language Model) to determine the sentiment of the content and how it varies over time. The project will provide practical experience in web scraping, data processing, databases and using LLMs for sentiment analysis. The final output will be a functional dashboard that displays the sentiment analysis results in an easy-to-understand format. + +## Prerequisites + +- Languages: Intermediate understanding of Python +- Hardware: Access to a computer with internet connectivity and access to cloud instances + +## Resources from Arm and our partners + +You are free to choose your own implementation details. The resources below are examples to get started. + +- External Documentation: [BeautifulSoup](https://pypi.org/project/beautifulsoup4/) +- Learning Paths: [Deploy MariaDB](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/) + + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program.
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-Smart-Voice-Assistant.md b/docs/_posts/2025-05-30-Smart-Voice-Assistant.md new file mode 100644 index 00000000..a3394f30 --- /dev/null +++ b/docs/_posts/2025-05-30-Smart-Voice-Assistant.md @@ -0,0 +1,93 @@ +--- +title: Smart-Voice-Assistant +description: This project trains and deploys a TinyML keyword-spotting model on an Arm Cortex-M55/U55 board to create a low-power voice assistant that recognizes spoken commands and quantifies its accuracy, latency, and energy use. +subjects: +- ML +requires-team: +- No +platform: +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + This project aims to develop a simple voice assistant that can recognize spoken commands such as “turn on the light,” “play music,” and other similar tasks. The voice assistant will be able to control peripheral devices accordingly. The main objective is to implement your design on a low-power microcontroller Cortex-M55/U55 to create low-level machine learning applications. You should look to access metrics such as the accuracy, power and computation time. 
Please refer our [Machine Learning keyword spotting example](https://github.com/Arm-Examples/mlek-cmsis-pack-examples) as a reference. + + The deliverables include a functional voice assistant capable of understanding and executing basic commands, along with documentation detailing the development process and the performance of the system. + + ## Prequisites + + - Languages: Python, C++, Embedded C + - Tooling: TensorFlow Lite for Microcontrollers, Keil MDK + - Hardware: Cortex-M55/U55 development board (or Corstone Virtual Platform), microphone, peripheral devices (e.g., lights, speakers) + + + ## Resources from Arm and our partners + + - Learning paths: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) + - Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) + - Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) + - Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) + - Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + + +## Description +This project aims to develop a simple voice assistant that can recognize spoken commands such as “turn on the light,” “play music,” and other similar tasks. The voice assistant will be able to control peripheral devices accordingly. The main objective is to implement your design on a low-power microcontroller Cortex-M55/U55 to create low-level machine learning applications. You should look to assess metrics such as the accuracy, power and computation time. Please refer to our [Machine Learning keyword spotting example](https://github.com/Arm-Examples/mlek-cmsis-pack-examples) as a reference. + +The deliverables include a functional voice assistant capable of understanding and executing basic commands, along with documentation detailing the development process and the performance of the system. + +## Prerequisites + +- Languages: Python, C++, Embedded C +- Tooling: TensorFlow Lite for Microcontrollers, Keil MDK +- Hardware: Cortex-M55/U55 development board (or Corstone Virtual Platform), microphone, peripheral devices (e.g., lights, speakers) + + +## Resources from Arm and our partners + +- Learning paths: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) +- Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) +- Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) +- Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) +- Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm
Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md b/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md new file mode 100644 index 00000000..01f66357 --- /dev/null +++ b/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md @@ -0,0 +1,126 @@ +--- +title: SpecINT2017-benchmarking-on-Arm64 +description: This self-service project profiles SPEC CPU2017 on Arm64 servers—using GCC, Clang, and Arm Compiler with top-down analysis—to reveal how compiler choices and Arm micro-architectural features impact execution time, energy efficiency, and performance bottlenecks. +subjects: +- Performance and Architecture +- Migration to Arm +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ### Description + + **Why this is important?** + + SPEC is an industry standard for assessing the performance of both single-threaded and multi-threaded applications across various data types and compilers. This synthetic workload accurately represents real-world tasks and serves as a common metric for evaluating platform choices. 
Therefore, it is essential to comprehend the inner workings of these benchmarks to identify which microarchitectural features can enhance end-user applications. + + **Project Summary** + + This project aims to replicate the characterisation study from "SPEC CPU2017: Performance, Event, and Energy Characterization on the Core i7-8700K" on an Arm64 platform (e.g., Ampere Altra, AWS Graviton) using different compilers and performance profiling tools. The study will analyze how compiler optimizations and architectural features affect execution time, energy efficiency, and instruction throughput on Arm-based server processors. Deliverables include a comprehensive performance analysis report, reproducible benchmarking scripts, and a dataset comparing performance across different configurations. The report should locate microarchitectural bottlenecks using the [top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology), compiler performance and recommendations on how to improve performance. + + ## Prerequisites + + Hardware: Access to Arm64-based server (Ampere Altra, AWS Graviton, Raspberry Pi for preliminary tests) + + Software: Familiarity with performance engineering and OOP in a language such as C++.
+ + Compilers: GCC, LLVM/Clang, Arm Compiler for Linux + + Profiling Tools: perf, Arm Performance Libraries + + Workloads: SPEC CPU2017 (academic license required), custom workloads + + ## Resources from Arm and our partners + + - Research Article: [Characterisation Paper on x86](https://research.spec.org/icpe_proceedings/2019/proceedings/p111.pdf) + + - Whitepaper: [Arm Top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology) + + - Install Guide:[Install Perf for Linux on Arm](https://learn.arm.com/install-guides/perf/) + + - Documentation: [Arm Performance Counters](https://developer.arm.com/documentation/ddi0379/a/Introduction/Performance-counters) + + - Documentation: [SPEC CPU2017 ](https://www.spec.org/cpu2017/results/) + + - Documentation: [GNU compilers](https://gcc.gnu.org/) + + - Software Download: [Arm compiler for Linux](https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- +### Description + +**Why this is important?** + +SPEC is an industry standard for assessing the performance of both single-threaded and multi-threaded applications across various data types and compilers. 
This synthetic workload accurately represents real-world tasks and serves as a common metric for evaluating platform choices. Therefore, it is essential to comprehend the inner workings of these benchmarks to identify which microarchitectural features can enhance end-user applications. + +**Project Summary** + +This project aims to replicate the characterisation study from "SPEC CPU2017: Performance, Event, and Energy Characterization on the Core i7-8700K" on an Arm64 platform (e.g., Ampere Altra, AWS Graviton) using different compilers and performance profiling tools. The study will analyze how compiler optimizations and architectural features affect execution time, energy efficiency, and instruction throughput on Arm-based server processors. Deliverables include a comprehensive performance analysis report, reproducible benchmarking scripts, and a dataset comparing performance across different configurations. The report should locate microarchitectural bottlenecks using the [top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology), compiler performance and recommendations on how to improve performance. + +## Prerequisites + +Hardware: Access to Arm64-based server (Ampere Altra, AWS Graviton, Raspberry Pi for preliminary tests) + +Software: Familiarity with performance engineering and OOP in a language such as C++.
+ +Compilers: GCC, LLVM/Clang, Arm Compiler for Linux + +Profiling Tools: perf, Arm Performance Libraries + +Workloads: SPEC CPU2017 (academic license required), custom workloads + +## Resources from Arm and our partners + +- Research Article: [Characterisation Paper on x86](https://research.spec.org/icpe_proceedings/2019/proceedings/p111.pdf) + +- Whitepaper: [Arm Top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology) + +- Install Guide:[Install Perf for Linux on Arm](https://learn.arm.com/install-guides/perf/) + +- Documentation: [Arm Performance Counters](https://developer.arm.com/documentation/ddi0379/a/Introduction/Performance-counters) + +- Documentation: [SPEC CPU2017 ](https://www.spec.org/cpu2017/results/) + +- Documentation: [GNU compilers](https://gcc.gnu.org/) + +- Software Download: [Arm compiler for Linux](https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file diff --git a/docs/_posts/2025-05-30-Write-A-Learning-Path.md b/docs/_posts/2025-05-30-Write-A-Learning-Path.md new file mode 100644 index 00000000..b372f0b5 --- /dev/null +++ b/docs/_posts/2025-05-30-Write-A-Learning-Path.md @@ -0,0 +1,80 @@ +--- +title: Write-A-Learning-Path +description: This project lets students turn their Arm expertise into a publish-ready Learning Path—creating a structured, hands-on tutorial that guides others through a complete, hardware-friendly build and showcases the author’s teaching skills. +subjects: +- Libraries +- Web +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- Mobile, Graphics, and Gaming +- Automotive +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + This exciting opportunity invites students to create and submit a comprehensive learning path focused on any aspect of Arm technology. Participants will have the chance to showcase their expertise, contribute to the academic community, and potentially publish their work for a wider audience. Learning paths are structured tutorials that guide learners through a series of topics and skills. For example, a project could involve developing a learning path on Arm-based embedded systems, culminating in a practical demonstration of a working prototype, such as a simple temperature monitoring system using a basic microcontroller and a few sensors. This project can be easily recreated with minimal hardware and software access. We look forward to seeing your innovative and insightful submissions! 
+ + ## Prequisites + + - Computer with Internet Connectivity + + ## Resources from Arm and our partners + + - Documentation: [How to create a learning path](https://learn.arm.com/learning-paths/cross-platform/_example-learning-path/) + - Documentation: (https://github.com/ArmDeveloperEcosystem/arm-learning-paths/discussions/categories/ideas-for-new-learning-paths) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + + +## Description +This exciting opportunity invites students to create and submit a comprehensive learning path focused on any aspect of Arm technology. Participants will have the chance to showcase their expertise, contribute to the academic community, and potentially publish their work for a wider audience. Learning paths are structured tutorials that guide learners through a series of topics and skills. For example, a project could involve developing a learning path on Arm-based embedded systems, culminating in a practical demonstration of a working prototype, such as a simple temperature monitoring system using a basic microcontroller and a few sensors. This project can be easily recreated with minimal hardware and software access. We look forward to seeing your innovative and insightful submissions! 
+ +## Prequisites + +- Computer with Internet Connectivity + +## Resources from Arm and our partners + +- Documentation: [How to create a learning path](https://learn.arm.com/learning-paths/cross-platform/_example-learning-path/) +- Documentation: (https://github.com/ArmDeveloperEcosystem/arm-learning-paths/discussions/categories/ideas-for-new-learning-paths) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-05-30-projects.md b/docs/_posts/2025-05-30-projects.md new file mode 100644 index 00000000..fede046c --- /dev/null +++ b/docs/_posts/2025-05-30-projects.md @@ -0,0 +1,13 @@ +--- +title: projects +filter: project +publication-date: 2025-05-30 +layout: article +full_description: |- + **Arm Developer Labs** is a repository of software and hardware project suggestions sourced from internal Arm teams and our network of eco-system partners but framed in way to be immediately accessible and useful to both academics and professional software developers alike. + + Filter or keyword search below. You can take a project as is, adapt it to your circumstances or simply take inspiration from it. Each project link shows what contextual collateral and resources are available. 
To get benefits from Arm, you need to tell us about your work. Read more on the [homepage](https://arm-university.github.io/Arm-Developer-Labs/). +--- +**Arm Developer Labs** is a repository of software and hardware project suggestions sourced from internal Arm teams and our network of eco-system partners but framed in a way to be immediately accessible and useful to both academics and professional software developers alike. + +Filter or keyword search below. You can take a project as is, adapt it to your circumstances or simply take inspiration from it. Each project link shows what contextual collateral and resources are available. To get benefits from Arm, you need to tell us about your work. Read more on the [homepage](https://arm-university.github.io/Arm-Developer-Labs/). \ No newline at end of file diff --git a/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md b/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md new file mode 100644 index 00000000..e9988d3a --- /dev/null +++ b/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md @@ -0,0 +1,103 @@ +--- +title: C-Based-Application-from-Scratch +description: This self-service project goes back to the fundamentals. The challenge is to develop an application of your choice but you are only permitted to use the C language with as few dependencies as possible. +subjects: +- Performance and Architecture +- Libraries +requires-team: +- No +platform: +- IoT +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-07-11 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + **Why this is important?** + + Modern, higher-level managed languages such as Java and Python enable developers to stand up complex applications quickly. However, this often comes at the expense of configurability and performance.
Developing low-level skills is valuable to unlocking performance but also is crucial to understanding the principal mechanisms in computer architecture and how to write programs that leverage available hardware features. At Arm, we have a history of developing low-level software components, such as compilers, as well as contributing to software projects, such as the Linux Kernel. The demand for developers with these skills is high. + + **Project Summary** + + This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files. Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count. + + Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This is excluding those provided by the C standard library. You are also free to use any suitable compiler. If you use a different language or a dependency written in another language, please include a short justification in your submission. + + Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions. + + + ## Prerequisites + + - Access to a Raspberry Pi device (any generation) + - Intermediate Understanding of the C language + + + ## Resources from Arm and our partners + + - External Resource: [Getting started with your Raspberry Pi](https://www.raspberrypi.com/documentation/computers/getting-started.html). + - External Resource: [C language documentation](https://en.cppreference.com/w/c/language.html) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program.
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + + ### Previous Submissions + 1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git). + 2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game). +--- +## Description + +**Why this is important?** + +Modern, higher-level managed languages such as Java and Python enable developers to stand up complex applications quickly. However, this often comes at the expense of configurability and performance. Developing low-level skills is valuable to unlocking performance but also is crucial to understanding the principal mechanisms in computer architecture and how to write programs that leverage available hardware features. At Arm, we have a history of developing low-level software components, such as compilers, as well as contributing to software projects, such as the Linux Kernel. The demand for developers with these skills is high. + +**Project Summary** + +This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files. Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count.
+ +Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This is excluding those provided by the C standard library. You are also free to use any suitable compiler. If you use a different language or a dependency written in another language, please include a short justification in your submission. + +Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions. + + +## Prerequisites + +- Access to a Raspberry Pi device (any generation) +- Intermediate Understanding of the C language + + +## Resources from Arm and our partners + +- External Resource: [Getting started with your Raspberry Pi](https://www.raspberrypi.com/documentation/computers/getting-started.html). +- External Resource: [C language documentation](https://en.cppreference.com/w/c/language.html) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + +### Previous Submissions +1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git). +2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game).
\ No newline at end of file diff --git a/docs/_posts/2025-08-28-NPC-LLM-Runtime.md b/docs/_posts/2025-08-28-NPC-LLM-Runtime.md new file mode 100644 index 00000000..b2124229 --- /dev/null +++ b/docs/_posts/2025-08-28-NPC-LLM-Runtime.md @@ -0,0 +1,122 @@ +--- +title: NPC-LLM-Runtime +description: This self-service project explores novel ways of integrating Large Language Models (LLMs) into real-time gameplay to drive dynamic Non-Playable Character (NPC) interactions. +subjects: +- ML +- Gaming +- Graphics +requires-team: +- No +platform: +- AI +- Mobile, Graphics, and Gaming +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-08-28 +license: +status: +- Published +badges: trending +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + **Why is this important?** + + Large Language Models (LLMs) are redefining the possibilities for interactive game experiences, especially in how players engage with Non-Playable Characters (NPCs). Traditional NPCs rely on pre-scripted dialogue trees and behavior systems, which limit immersion and adaptability. By leveraging LLMs locally, we can move beyond static interactions and create dynamic, real-time NPC behaviors that respond intelligently to players. This not only enhances immersion but also demonstrates the feasibility of running advanced AI directly on-device without reliance on cloud infrastructure. Such innovation could shape the future of gaming, particularly in mobile platforms where performance and autonomy are critical. + + **Project Summary** + + This project challenges you to propose and implement novel methods of using LLMs to control or interact with NPCs during runtime. Beyond dialogue systems, the focus is on new, creative mechanisms that showcase the potential of LLM-driven NPCs in real-time gameplay. 
Your implementation should demonstrate a working prototype running locally at a stable 30–60 FPS, ensuring both responsiveness and playability. While achieving mobile deployment is the ultimate goal, a PC/laptop demo will also be accepted as proof of concept. The final submission should include source code, reproducible build instructions, supporting documentation, and a short demo video. + + To qualify, your submission should include, where possible: + + - Source code (with clear documentation and build instructions) + - A reproducible setup (e.g. scripts, datasets, or dependencies) + - A supporting document describing the project and design decisions + - High-quality images and a video (≤ 3 minutes) demonstrating the demo in action + + Please ensure your contribution contains no confidential material and that you have rights to share it, especially if affiliated with an academic or commercial institution. + + ## Prequisites + - Familiarity with a modern game engine (e.g., Unity, Unreal Engine, Godot) + - Experience with integrating machine learning models into real-time applications + - Knowledge of C++, Python, or a game scripting language + - Understanding of on-device ML optimization (e.g., quantization, pruning, mobile deployment) + - Access to hardware capable of running LLM inference locally (PC or mobile) + + + ## Resources from Arm and our partners + + - Blog: [The Future of Interaction with Mobile Game Characters](https://dl.acm.org/doi/10.1145/3641234.3671019) + - Blog: [The Future is Here: Verbal Interaction with NPCs on Mobile](https://doi.org/10.1145/3664294.3664365) + - Blog: [Google AI for game developers- Google Developers Blog](https://developers.googleblog.com/en/google-ai-for-game-developers/) + - Webinar: [Generative AI in Game Development](https://meet95924766.adobeconnect.com/pkevz158x6tp/) + - Paper: [A Survey on Large Language Model-Based Game Agents ](https://arxiv.org/abs/2404.02039) + - Paper: [Large Language Models and Games: A Survey 
and Roadmap](https://arxiv.org/abs/2402.18659) + + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- +## Description + +**Why is this important?** + +Large Language Models (LLMs) are redefining the possibilities for interactive game experiences, especially in how players engage with Non-Playable Characters (NPCs). Traditional NPCs rely on pre-scripted dialogue trees and behavior systems, which limit immersion and adaptability. By leveraging LLMs locally, we can move beyond static interactions and create dynamic, real-time NPC behaviors that respond intelligently to players. This not only enhances immersion but also demonstrates the feasibility of running advanced AI directly on-device without reliance on cloud infrastructure. Such innovation could shape the future of gaming, particularly in mobile platforms where performance and autonomy are critical. + +**Project Summary** + +This project challenges you to propose and implement novel methods of using LLMs to control or interact with NPCs during runtime. Beyond dialogue systems, the focus is on new, creative mechanisms that showcase the potential of LLM-driven NPCs in real-time gameplay. 
Your implementation should demonstrate a working prototype running locally at a stable 30–60 FPS, ensuring both responsiveness and playability. While achieving mobile deployment is the ultimate goal, a PC/laptop demo will also be accepted as proof of concept. The final submission should include source code, reproducible build instructions, supporting documentation, and a short demo video. + +To qualify, your submission should include, where possible: + +- Source code (with clear documentation and build instructions) +- A reproducible setup (e.g. scripts, datasets, or dependencies) +- A supporting document describing the project and design decisions +- High-quality images and a video (≤ 3 minutes) demonstrating the demo in action + +Please ensure your contribution contains no confidential material and that you have rights to share it, especially if affiliated with an academic or commercial institution. + +## Prequisites +- Familiarity with a modern game engine (e.g., Unity, Unreal Engine, Godot) +- Experience with integrating machine learning models into real-time applications +- Knowledge of C++, Python, or a game scripting language +- Understanding of on-device ML optimization (e.g., quantization, pruning, mobile deployment) +- Access to hardware capable of running LLM inference locally (PC or mobile) + + +## Resources from Arm and our partners + +- Blog: [The Future of Interaction with Mobile Game Characters](https://dl.acm.org/doi/10.1145/3641234.3671019) +- Blog: [The Future is Here: Verbal Interaction with NPCs on Mobile](https://doi.org/10.1145/3664294.3664365) +- Blog: [Google AI for game developers- Google Developers Blog](https://developers.googleblog.com/en/google-ai-for-game-developers/) +- Webinar: [Generative AI in Game Development](https://meet95924766.adobeconnect.com/pkevz158x6tp/) +- Paper: [A Survey on Large Language Model-Based Game Agents ](https://arxiv.org/abs/2404.02039) +- Paper: [Large Language Models and Games: A Survey and 
Roadmap](https://arxiv.org/abs/2402.18659) + + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-03-Python-Porting-Challenge.md b/docs/_posts/2025-11-03-Python-Porting-Challenge.md new file mode 100644 index 00000000..238998f6 --- /dev/null +++ b/docs/_posts/2025-11-03-Python-Porting-Challenge.md @@ -0,0 +1,118 @@ +--- +title: Python-Porting-Challenge +description: This challenge focuses on enabling Python support for Windows on Arm (WoA) to improve developer experience. While Python is widely used in research and industry, many popular packages—such as Pandas—still lack pre-built WoA binaries (win_arm64 wheels). The goal is to validate and optimise third-party packages, fix compatibility issues, and collaborate with maintainers to upstream WoA support. 
+subjects: +- Libraries +requires-team: +- No +platform: +- Laptops and Desktops +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-11-03 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + ### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility* + + ## Description + + Windows on Arm brings the power of Arm architecture to Windows users, delivering energy efficiency and performance for a wide range of devices. As adoption grows, ensuring a seamless developer experience is critical, especially for Python, one of the most widely used languages in research, education, and industry. + + This challenge is on advancing the Arm ecosystem by addressing critical gaps in software enablement. A key objective is enabling Windows on Arm (WoA) across the WoA Python package ecosystem, which involves validating and optimising 3rd party packages to ensure seamless functionality across diverse environments. + + Despite Python’s widespread adoption, many popular packages such as Pandas still lack pre-built Windows on Arm binaries (`win_arm64` wheels), creating a barrier for developers and researchers who rely on these tools for data analysis and scientific computing. If a Windows on Arm Python developer wanted to use Pandas, they would have to recompile from source, which required the correct toolchain and is not guaranteed to compile or run successfully. + + Key Objectives: + + - Python Ecosystem Enablement: Turn at least 5 amber projects green on [Windows Arm64 Wheels](https://tonybaloney.github.io/windows-arm64-wheels/). + - Identify packages that do not have readily available `win_arm64` wheels. 
Identify any bugs or regressions when porting an application (for example, `x86` intrinsics) and create a patch that resolves issues and enables the packages to correctly build and run performantly. + - Community Collaboration: Engage with global developer communities, such as Python package maintainers, to get WoA package support upstreamed and integrated. + + + ## Prerequisites + + - Intermediate to advanced understanding of the Python language + - Some experience on creating python packages and continuous integration testing. + - If you decide to tackle non pure-python packages that are written in other languages, you will need an intermediate understanding of the language the program was written in (e.g., Rust, Java, C++ etc.). + + ## Resources from Arm and our partners + + - External Documentation: [Methods to run Windows on Arm64](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) + - External Documentation: [Python Packages without Windows-on-Arm Wheels](https://tonybaloney.github.io/windows-arm64-wheels/) + - External Documentation: [Python Packages without Windows-on-Arm Wheels additional resource](http://www.winarm64wheels.com/) + - Learning Path: [Sampling CPython with WindowsPerf](https://learn.arm.com/learning-paths/laptops-and-desktops/windowsperf_sampling_cpython/) + - Community Post: [Python on Windows Arm64](https://discuss.python.org/t/python-on-windows-arm64/104524) + - External Documentation: [Status of Python versions](https://devguide.python.org/versions/) + - GitHub Repository: [Source code and documentation to build Python Wheels](https://github.com/pypa/cibuildwheel) + + + ## Support Level + + If you would like to request a small donation to help procure Windows on Arm hardware, please reach out to us at Arm-Developer-Lab@arm.com. + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program.
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + +### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility* + +## Description + +Windows on Arm brings the power of Arm architecture to Windows users, delivering energy efficiency and performance for a wide range of devices. As adoption grows, ensuring a seamless developer experience is critical, especially for Python, one of the most widely used languages in research, education, and industry. + +This challenge is on advancing the Arm ecosystem by addressing critical gaps in software enablement. A key objective is enabling Windows on Arm (WoA) across the WoA Python package ecosystem, which involves validating and optimising 3rd party packages to ensure seamless functionality across diverse environments. + +Despite Python’s widespread adoption, many popular packages such as Pandas still lack pre-built Windows on Arm binaries (`win_arm64` wheels), creating a barrier for developers and researchers who rely on these tools for data analysis and scientific computing. If a Windows on Arm Python developer wanted to use Pandas, they would have to recompile from source, which required the correct toolchain and is not guaranteed to compile or run successfully. 
+ +Key Objectives: + +- Python Ecosystem Enablement: Turn at least 5 amber projects green on [Windows Arm64 Wheels](https://tonybaloney.github.io/windows-arm64-wheels/). +- Identify packages that do not have readily available `win_arm64` wheels. Identify any bugs or regressions when porting an application (for example, `x86` intrinsics) and create a patch that resolves issues and enables the packages to correctly build and run performantly. +- Community Collaboration: Engage with global developer communities, such as Python package maintainers, to get WoA package support upstreamed and integrated. + + +## Prerequisites + +- Intermediate to advanced understanding of the Python language +- Some experience creating Python packages and continuous integration testing. +- If you decide to tackle non pure-Python packages that are written in other languages, you will need an intermediate understanding of the language the program was written in (e.g., Rust, Java, C++ etc.). + +## Resources from Arm and our partners + +- External Documentation: [Methods to run Windows on Arm64](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) +- External Documentation: [Python Packages without Windows-on-Arm Wheels](https://tonybaloney.github.io/windows-arm64-wheels/) +- External Documentation: [Python Packages without Windows-on-Arm Wheels additional resource](http://www.winarm64wheels.com/) +- Learning Path: [Sampling CPython with WindowsPerf](https://learn.arm.com/learning-paths/laptops-and-desktops/windowsperf_sampling_cpython/) +- Community Post: [Python on Windows Arm64](https://discuss.python.org/t/python-on-windows-arm64/104524) +- External Documentation: [Status of Python versions](https://devguide.python.org/versions/) +- GitHub Repository: [Source code and documentation to build Python Wheels](https://github.com/pypa/cibuildwheel) + + +## Support Level + +If you would like to request a small donation to help procure Windows on Arm 
hardware, please reach out to us at Arm-Developer-Lab@arm.com. + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + +To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md b/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md new file mode 100644 index 00000000..27efe0e4 --- /dev/null +++ b/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md @@ -0,0 +1,136 @@ +--- +title: Always-On-AI-with-Ethos-U85-NPU +description: The vision of Edge AI compute is to embed low-power intelligent sensing, perception, and decision systems everywhere. A low-power always-on-AI island continuously monitors sensory inputs to detect triggers. When a trigger is detected, it wakes up a more capable processor to carry out high-value inference, interaction, or control tasks. 
+subjects: +- ML +- Performance and Architecture +- Embedded Linux +- RTOS Fundamentals +requires-team: +- No +platform: +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-11-27 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + + **Why is this important?** + + The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy: + - A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies. + - When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance. + + This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources. + + **Project Summary** + + Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements: + + 1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers. + 2. 
A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference. + 3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring). + + *Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. For LLM inference, consider connecting a Raspberry Pi 5 or similar.* + + Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices. + + You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved. + + Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve? + + ## What will you use? + You should either be familiar with, or willing to learn about, the following: + - Programming: Python, C++, Embedded C + - ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA. + - Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. 
Yocto) or Raspberry Pi OS + + + ## Resources from Arm and our partners + - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) + - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) + - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) + - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) + - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + + +## Description + +**Why is this important?** + +The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy: +- A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies. 
+- When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance. + +This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources. + +**Project Summary** + +Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements: + +1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers. +2. A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference. +3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring). + +*Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. For LLM inference, consider connecting a Raspberry Pi 5 or similar.* + +Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices. 
+ +You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved. + +Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve? + +## What will you use? +You should either be familiar with, or willing to learn about, the following: +- Programming: Python, C++, Embedded C +- ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA. +- Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. Yocto) or Raspberry Pi OS + + +## Resources from Arm and our partners +- Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) +- Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) +- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) +- Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) +- PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md b/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md new file mode 100644 index 00000000..458a8f1c --- /dev/null +++ b/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md @@ -0,0 +1,130 @@ +--- +title: Edge-AI-On-Mobile +description: Leverage the latest SME2 (Scalable Matrix Extension 2) available on the newest vivo X300 smartphones (built on Arm Lumex CSS) for advanced image/video, audio and text processing edge AI. Explore how SME2, via KleidiAI, enables larger matrix workloads, higher throughput, and novel applications on device without cloud connectivity required. +subjects: +- ML +- Performance and Architecture +- Libraries +requires-team: +- No +platform: +- Mobile, Graphics, and Gaming +- AI +- IoT +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-11-27 +license: +status: +- Published +badges: trending +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + ### Why is this important? + + SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device. + + [SME2](https://www.arm.com/technologies/sme2) + + The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. 
Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases. + + [vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones) + + ### Project Summary + + Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2. + + Example project areas: + - Real-time video semantic segmentation (e.g., background removal + AR compositing) + - Live object detection + natural-language description (text summary of what the camera sees) + - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition + - On-device lightweight LLM or encoder-only transformer processing for mobile assistants + + Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case. + + Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2. 
+ + --- + + ## Resources from Arm and our partners + + - Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile) + - Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/) + - Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/) + - Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/) + - Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/) + - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) + - Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm) + + --- + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+ + --- +--- +## Description + +### Why is this important? + +SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device. + +[SME2](https://www.arm.com/technologies/sme2) + +The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases. + +[vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones) + +### Project Summary + +Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2. + +Example project areas: + - Real-time video semantic segmentation (e.g., background removal + AR compositing) + - Live object detection + natural-language description (text summary of what the camera sees) + - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition + - On-device lightweight LLM or encoder-only transformer processing for mobile assistants + +Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. 
Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case. + +Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2. + +--- + +## Resources from Arm and our partners + +- Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile) +- Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/) +- Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/) +- Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/) +- Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/) +- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) +- Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm) + +--- + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + +--- \ No newline at end of file diff --git a/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md b/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md new file mode 100644 index 00000000..a620c800 --- /dev/null +++ b/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md @@ -0,0 +1,213 @@ +--- +title: Ethos-U85-NPU-Applications +description: Push the limits of Edge AI by deploying the heaviest inference applications possible on Ethos-U85. Students will explore transformer-based and TOSA-optimized workloads that demonstrate performance levels on the next-gen of Ethos NPUs. +subjects: +- ML +- Performance and Architecture +requires-team: +- No +platform: +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-11-27 +license: +status: +- Published +badges: trending +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + ## Description + + **Why is this important?** + + The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices. + + This project challenges you to explore the boundaries of what’s possible on Ethos-U85. 
The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85. + + [Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85) + + **Project Summary** + + Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities. + + Your project should include: + + 1. Model Deployment and Optimization + Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using: + - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance. + - The Vela compiler for optimization. + + These tools can be used to: + - Convert and visualize model graphs in TOSA format. + - Identify unsupported operators. + - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting. + - Run Vela for optimized compilation targeting Ethos-U85. + + 2. Application Demonstration + Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include: + - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification). + - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks. + - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding. + + 3. Analysis and Benchmarking + Report quantitative results on: + - Inference latency, throughput (FPS or tokens/s), and memory footprint. + - Power efficiency under load (optional). 
+ - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits). + - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion. + + --- + + ## What kind of projects should you target? + + To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria: + + - Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models. + - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms. + - High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput. + - *Example:* 512×512 semantic segmentation or multi-object detection. + - Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance. + - *Example:* large MLP heads or transformer token mixers. + - Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations. + + The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies. + + --- + + ## What will you use? + You should be familiar with, or willing to learn about: + - Programming: Python, C/C++ + - ExecuTorch or TensorFlow Lite (Micro/LiteRT) + - Techniques for optimising AI models for the edge (quantization, pruning, etc.) 
+ - Optimization Tools: + - TOSA Model Explorer + - .tflite to .tosa converter (if using Tensorflow rather than ExecuTorch) + - Vela compiler for Ethos-U + - Bare-metal or RTOS (e.g., Zephyr) + + --- + + ## Resources from Arm and our partners + - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) + - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) + - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) + - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) + - Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model) + - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) + --- + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + +## Description + +**Why is this important?** + +The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. 
With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices. + +This project challenges you to explore the boundaries of what’s possible on Ethos-U85. The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85. + +[Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85) + +**Project Summary** + +Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities. + +Your project should include: + +1. Model Deployment and Optimization + Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using: + - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance. + - The Vela compiler for optimization. + + These tools can be used to: + - Convert and visualize model graphs in TOSA format. + - Identify unsupported operators. + - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting. + - Run Vela for optimized compilation targeting Ethos-U85. + +2. Application Demonstration + Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include: + - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification). + - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks. 
+ - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding. + +3. Analysis and Benchmarking + Report quantitative results on: + - Inference latency, throughput (FPS or tokens/s), and memory footprint. + - Power efficiency under load (optional). + - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits). + - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion. + +--- + +## What kind of projects should you target? + +To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria: + +- Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models. + - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms. +- High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput. + - *Example:* 512×512 semantic segmentation or multi-object detection. +- Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance. + - *Example:* large MLP heads or transformer token mixers. +- Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations. + +The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies. + +--- + +## What will you use? 
+You should be familiar with, or willing to learn about: +- Programming: Python, C/C++ +- ExecuTorch or TensorFlow Lite (Micro/LiteRT) +- Techniques for optimising AI models for the edge (quantization, pruning, etc.) +- Optimization Tools: + - TOSA Model Explorer + - .tflite to .tosa converter (if using TensorFlow rather than ExecuTorch) + - Vela compiler for Ethos-U +- Bare-metal or RTOS (e.g., Zephyr) + +--- + +## Resources from Arm and our partners +- Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) +- Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) +- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) +- Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) +- Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model) +- PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) +--- + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution.
+ Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file diff --git a/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md b/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md new file mode 100644 index 00000000..58bcca09 --- /dev/null +++ b/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md @@ -0,0 +1,158 @@ +--- +title: Game-Dev-Using-Neural-Graphics-&-Unreal-Engine +description: Build a playable Unreal Engine 5 game demo that utilises Arm’s Neural Graphics SDK UE plugin for features such as Neural Super Sampling (NSS). Showcase near-identical image quality at lower resolution by driving neural rendering directly in the graphics pipeline. +subjects: +- ML +- Gaming +- Libraries +- Graphics +requires-team: +- No +platform: +- Mobile, Graphics, and Gaming +- Laptops and Desktops +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-11-27 +license: +status: +- Published +badges: trending +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + ## Description + + ### Why is this important? + + Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI-powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation. + + Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face. + + [Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology) + + Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR).
It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process. + + Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released. + + ### Project Summary + + Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate: + - **Near-identical visuals at lower resolution** (render low → upscale with NSS) + + Document your progress and findings and consider alternative applications of the neural technology within games development. + + Attempt different environments and objects. For example: + + - Daytime vs night + - Urban city, jungle forest, ocean floor, alien planet, building interiors + - Complex lighting and shadows + - NPCs with detailed clothing, faces, hair. Include animations. + + Make your scenes dynamic with particle effects, shadows, physics and motion. + + --- + + ## Pre-requisites + - Laptop/PC/Mobile for Android Unreal Engine game development + - Willingness to learn about games development and graphics, and the increasing use of AI in these fields. 
+ + --- + + ## Resources from Arm and partners + - Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling) + - Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works) + - Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics) + - Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/) + - Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/) + - Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/) + - Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine) + - Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines) + - Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym) + - Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/) + + --- + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
+ + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + +## Description + +### Why is this important? + +Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI-powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation. + +Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face. + +[Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology) + +Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process. + +Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released. + +### Project Summary + +Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate: +- **Near-identical visuals at lower resolution** (render low → upscale with NSS) + +Document your progress and findings and consider alternative applications of the neural technology within games development. + +Attempt different environments and objects. For example: + +- Daytime vs night +- Urban city, jungle forest, ocean floor, alien planet, building interiors +- Complex lighting and shadows +- NPCs with detailed clothing, faces, hair. Include animations.
+ +Make your scenes dynamic with particle effects, shadows, physics and motion. + +--- + +## Pre-requisites +- Laptop/PC/Mobile for Android Unreal Engine game development +- Willingness to learn about games development and graphics, and the increasing use of AI in these fields. + +--- + +## Resources from Arm and partners +- Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling) +- Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works) +- Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics) +- Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/) +- Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/) +- Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/) +- Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine) +- Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines) +- Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym) +- Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/) + +--- + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file From a10a50368ace3b2187216fb1a13b3e243d1619a3 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 11:54:22 +0000 Subject: [PATCH 90/98] Add files via upload --- docs/_posts/2025-05-30-AI-Agents.md | 70 ---------- .../2025-05-30-AI-Powered-Porting-Tool.md | 76 ----------- .../2025-05-30-AMBA-Simulator-Framework.md | 62 --------- .../2025-05-30-Academic-Trends-Dashboard.md | 63 --------- ...25-05-30-Architecture-Insight-Dashboard.md | 72 ----------- .../2025-05-30-Arduino-IDE-Windows-on-Arm.md | 77 ----------- ...5-05-30-Bioinformatic-Pipeline-Analysis.md | 76 ----------- ...30-Compliance-Ready-Smart-Camera-System.md | 65 ---------- .../2025-05-30-FPGA-Accellerator-with-DDR.md | 51 -------- docs/_posts/2025-05-30-HPC-Algorithm.md | 61 --------- ...5-05-30-Haskell-Compiler-Windows-on-Arm.md | 76 ----------- .../2025-05-30-Human-Centric-Robotics.md | 72 ----------- .../2025-05-30-LLM-Benchmark-on-Arm-Server.md | 53 -------- ...-05-30-Machine-Learning-on-AWS-Graviton.md | 69 ---------- ...-05-30-Processor-in-the-Loop-Automotive.md | 75 ----------- .../2025-05-30-Quantisation-Aware-Training.md | 67 ---------- .../2025-05-30-R-Arm-Community-Support.md | 83 ------------ ...25-05-30-Real-Time-Image-Classification.md | 63 --------- ...05-30-Responsible-AI-and-Yellow-Teaming.md | 85 ------------ 
...2025-05-30-Sentiment-Analysis-Dashboard.md | 57 --------- .../2025-05-30-Smart-Voice-Assistant.md | 60 --------- ...05-30-SpecINT2017-benchmarking-on-Arm64.md | 77 ----------- .../2025-05-30-Write-A-Learning-Path.md | 56 -------- docs/_posts/2025-05-30-projects.md | 10 -- ...-07-11-C-Based-Application-from-Scratch.md | 64 --------- docs/_posts/2025-08-28-NPC-LLM-Runtime.md | 75 ----------- .../2025-11-03-Python-Porting-Challenge.md | 71 ---------- ...5-11-27-Always-On-AI-with-Ethos-U85-NPU.md | 83 ------------ docs/_posts/2025-11-27-Edge-AI-On-Mobile.md | 79 ------------ .../2025-11-27-Ethos-U85-NPU-Applications.md | 121 ------------------ ...Dev-Using-Neural-Graphics-Unreal-Engine.md | 94 -------------- 31 files changed, 2163 deletions(-) diff --git a/docs/_posts/2025-05-30-AI-Agents.md b/docs/_posts/2025-05-30-AI-Agents.md index 72172e58..ae38df4c 100644 --- a/docs/_posts/2025-05-30-AI-Agents.md +++ b/docs/_posts/2025-05-30-AI-Agents.md @@ -1,73 +1,3 @@ ---- -title: AI-Agents -description: This self-service project builds a sandboxed AI agent on Arm hardware that harnesses appropriately sized LLMs to safely automate complex workflows—from DevOps pipelines to e-commerce tasks—demonstrating secure, efficient automation on accessible Arm platforms. -subjects: -- ML -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -badges: trending -layout: article -sidebar: - nav: projects -full_description: |- - ### Description - - **Why this is important?** - - AI Agents enhance large language models (LLMs) by performing user-driven actions, enabling various commercial applications. This is a nascent domain will emerging frameworks such as the model context protocol (MCP) leading to commercial products and services. 
The Arm architecture, from microcontrollers to servers, will be used to carry out agentic functions and Arm has many initatives to support the AI future. See [our website for more details](https://www.arm.com/markets/artificial-intelligence). - - **Project Summary** - - Participants must develop an AI-powered agent that automates repetitive and complex workflow tasks in a specific domain, such as software development, e-commerice, or DevOps. The foundational model can be a suitable model of your choice (e.g., [OpenAI API](https://openai.com/api/)) but you must consider the appropriate model for cost, reliability and accessibility. Additionally, you are free to choose the tools for agent functionality, such as [LLama-cpp-agent](https://github.com/Maximilian-Winter/llama-cpp-agent). One stipulatation, is that the LLM and/or agent must run on an Arm-based system, such as a Google Pixel phone or Arm-based server. - - The AI agent will be deployed in a sandboxed environment to ensure safety and prevent unintended consequences, including prompt guardrails - - ## Prerequisites - - - Intermediate understanding in an OOP language such as Python (for front-end, if needed). - - Familiarity using Databases such as PostgreSQL, MongoDB, VectorDB. - - Access to a LLM (e.g., through an API or on-device LLM) - - Optional API access to target workflow tools such as Jira, Jenkins etc. - - - ## Resources from Arm and our partners - - - Learning path: [Deploy and MCP Server on a Raspberry Pi5 for AI Agent Interaction](https://learn.arm.com/learning-paths/cross-platform/mcp-ai-agent/) - - - Learning path: [Deploy an AI Agent on Arm with llama.cpp and llama-cpp-agent](https://learn.arm.com/learning-paths/servers-and-cloud-computing/ai-agent-on-cpu/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. - - ### Previous Submissions - 1. [AI to Solve Maths Example Sheets at University of Cambridge. (Finley Stirk, Eliyahu Gluschove-Koppel and Ronak De)](https://github.com/egkoppel/example-papers) - - 2. [AI that interprets user requests, generates circuit descriptions, creates LTSpice ASC code, and iteratively refines circuit designs using a combination of GPT-based language models, a vision analysis module, and LTSpice simulation. (Gijeong Lee, Bill Leoutsakos)](https://github.com/BillLeoutsakosvl346/ElectroNinjaRefined) - - - 3. [AI agent to track real-time student engagement and exam performance (Jasper Wang, Sritej Tummuru, Talha Javed)](https://github.com/JasperWANG-911/AI_Agent) ---- ### Description **Why this is important?** diff --git a/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md b/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md index 82c890c4..0b3061ac 100644 --- a/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md +++ b/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md @@ -1,79 +1,3 @@ ---- -title: AI-Powered-Porting-Tool -description: This self-service project creates an AI-driven porting engine that analyzes package dependencies, auto-generates fixes, and submits pull requests—accelerating native macOS and Windows-on-Arm support for bioinformatics and R software so researchers can run demanding workflows directly on modern Arm devices. 
-subjects: -- CI-CD -- ML -- Migration to Arm -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - **Why this is important?** - - Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). While many leading Bioconda packages now support Linux/Arm, there remains a gap in native macOS and Windows on Arm support, as numerous packages default to emulated x86 environments. Additionally, the R community faces challenges with Windows-on-Arm support for community-created packages, with many unable to build due to x86-specific code issues. - - **Project Summary** - - This project challenges you to build an intelligent automation tool for porting software packages — for use in domains such as [bioinformatic pipelines with Nextflow](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/Bioinformatic-Pipeline-Analysis.md) or [statistics with R](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/R-Arm-Community-Support.md). 
- - Given the large number of community packages, applying manual patches is not only time-consuming but also inefficient, as many involve similar, repetitive adjustments—highlighting the need for a scalable, automated solution. - The goal is to build a sophisticated system (beyond simple shell scripts) that uses dependency graph analysis, machine learning, to: - - - Identify unported packages - - Trace recursive dependency issues - - Recommend or auto-generate build recipes and steps - - Evaluate build success and reattempt intelligently - - Generate pull requests when confident of a fix. - - For complex packages, offer guidance to developers on how to port them—for example, by suggesting tools like SSE2NEON for translating x86 SSE intrinsics. - - Be extensible to work with various packaging systems and languages - - This project is a blend of automation, machine learning, and systems programming. The outcome could directly contribute to open source ecosystems and help bring cutting-edge bioinformatics tools to wider hardware audiences. - - ## Prerequisites - - - Access to Apple Silicon or Windows on Arm machine. - - Familiarity with Python, Bash and Nextflow - - Familiar with genomics/bioinformatics or statistics with the R language. - - Experience or willing to learn nf-core pipelines, Conda, BioConda and Docker/Singularity. 
- - - ## Resources from Arm and our partners - - - External Resource: [Example Porting Script for Bioconda](https://github.com/dslarm/bioconda-contrib-notes/tree/main), [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) and [Bioconda package repository](https://bioconda.github.io/) - - Documentation: [nf-core documentation](https://nf-co.re/docs/) - - External Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/), Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages - - Dataset: Example [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---- ## Description **Why this is important?** diff --git a/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md b/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md index 3b66cc46..2c4b9b9b 100644 --- a/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md +++ b/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md @@ -1,65 +1,3 @@ ---- -title: AMBA-Simulator-Framework -description: This self-guided hardware project has you implement, simulate, and FPGA-prototype a Verilog AMBA bus—from simple APB to advanced CHI—sharpening hands-on expertise with Arm’s interconnect backbone and yielding a reusable reference design for future embedded systems. -subjects: -- Virtual Hardware -- Performance and Architecture -requires-team: -- No -platform: -- Embedded and Microcontrollers -sw-hw: -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Audience - Electronic Engineering - - ## Description - This project aims to develop a reference design of AMBA (Advanced Microcontroller Bus Architecture) infrastructure. You are free to choose which AMBA protocol and version of the interconnect standard to implement with more simple specifications e.g., APB, being easier to create than coherent protocols such as AMBA CHI. - - The main deliverables include the Verilog design of the AMBA infrastructure, a Verilog test bench for testing the design, an RTL (Register Transfer Level) simulation flow to verify the functionality, and an FPGA prototyping platform to demonstrate the design in a real-world environment. The project will provide a comprehensive understanding of AMBA protocols and their implementation, making it an excellent learning opportunity for students interested in digital design and hardware description languages. 
- - ## Prequisites - - - Intermediate understanding of Verilog, SystemVerilog or other hardware description languages (HDL). - - Access and basic understanding of ModelSim, Quartus and Vivado - - Access to a suitable FPGA development board (e.g., Xilinx or Altera), simulation tools - - ## Resources from Arm and our partners - - - - Video: [Introductory Video to AMBA](https://www.youtube.com/watch?v=zayyWwSxyW4) - - Documentation: [AMBA Interconnect Specifications](https://www.arm.com/architecture/system-architectures/amba/amba-specifications) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Previous Submissions - - Similar projects: - - https://github.com/kumarraj5364/AMBA-APB-PROTOCOL - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---- ## Audience diff --git a/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md b/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md index b79f62bc..1b4f0c1b 100644 --- a/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md +++ b/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md @@ -1,66 +1,3 @@ ---- -title: Academic-Trends-Dashboard -description: This self-service project creates a web-scraping, database-driven dashboard that visualizes how computer-science research topics shift over time—helping Arm partners and chip architects align future hardware designs with emerging algorithmic trends. -subjects: -- Web -- Databases -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Description - - **Why this is important?** - - The field of computer science research is continually evolving, with new algorithms shaping the design of future hardware. This project aims to guide computer architecture decisions, ensuring that upcoming hardware aligns with the needs of software algorithms and applications. The dashboard you create can be of use to Arm partners manufacturing physical chips and used to guide architecture decisions. - - **Project Summary** - - The main deliverable is a web scraping tool that pulls keywords from academic papers, considering the popularity of the paper and its publication. The data will be stored in an appropriate database and displayed in a web browser format, allowing users to visualize trends and changes in research focus over time. This project will provide practical experience in using APIs, web scraping, and data analysis. 
Some academic search engines to consider are Google Scholar, [BASE](https://www.base-search.net/), [Core](https://core.ac.uk/) and [Science.gov](https://science.gov/). - - - ## Prerequisites - - - Software: Intermediate understanding of a scripting programming language (e.g., Python, JavaScript), web development and statistics. - - Hardware: Access to a computer with internet connectivity - - API access to scrape specific journal websites; you may need to obtain explicit permission from the website administrators or owners. - - ## Resources from Arm and our partners - - - Learning path: [Deploy MariaDB on Arm servers](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/) - - Learning path: [Learn how to deploy PostgreSQL](https://learn.arm.com/learning-paths/servers-and-cloud-computing/postgresql/) - - Software Libraries: Example libraries for web scraping are [BeautifulSoup](https://pypi.org/project/beautifulsoup4/), Selenium. - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
---- ## Description diff --git a/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md b/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md index 16b56442..cf286c9c 100644 --- a/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md +++ b/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md @@ -1,75 +1,3 @@ ---- -title: Architecture-Insight-Dashboard -description: This self-service project develops a data-rich dashboard that visualizes the popularity of Arm CPU/OS combinations and pinpoints software-stack support for specific extensions—giving developers an instant, validated view of where their workloads will run best. -subjects: -- Performance and Architecture -- Web -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ### Description - - **Why this is important?** - - Developers often face challenges in selecting the appropriate platform for their software. With numerous smartphones and cloud instances available, gauging consumer popularity and availability can be difficult, and identifying software stack dependencies can be time-consuming. As Arm anticipates an increase in Arm-based products in the coming years, this situation is likely to become even more complex, requiring the need for a single, validated solution. - - **Project Summary** - - This project aims to develop a comprehensive dashboard that lets a developer know what proportion of devices support a specific Arm CPU extension, similar to [“Can I use”](https://caniuse.com/) for web development and any software compatibility issues. 
The functional requirements for the Architecture Insights dashboard: - - - Popularity of Arm architectures and Operating System combinations over time - - Searchable index of software, libraries and tools that have been optimised for a specific architecture. For example, "Does the video processing software, FFMPEG, support acceleration for SVE2 with Windows 11?" - - - Students will gain hands-on experience with data visualization, statistical analysis, web development, and market analysis, providing valuable insights into the Arm ecosystem. - - ## Prerequisites - - You are free to explore your own implementation. The skills below are examples. - - - Intermediate understanding of an OOP language such as Python or JavaScript - - Access to a computer with internet connectivity - - - ## Resources from Arm and our partners - - - Website: [Arm Software Ecosystem Dashboard](https://www.arm.com/developer-hub/ecosystem-dashboard) - - Website: [Windows on Arm Support Wiki page](https://linaro.atlassian.net/wiki/spaces/WOAR/overview) - - Website: ["Can I Use?" dashboard](https://caniuse.com/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
---- diff --git a/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md b/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md index a1e2146a..e76528c0 100644 --- a/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md +++ b/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md @@ -1,80 +1,3 @@ ---- -title: Arduino-IDE-Windows-on-Arm -description: This self-service project ports and optimizes the Arduino IDE—patching its lzma-native dependency—to run natively and efficiently on Windows on Arm, giving developers hands-on experience with cross-platform builds, Arm64 performance tuning, and upstream open-source contributions. -subjects: -- Performance and Architecture -- Migration to Arm -- Libraries -requires-team: -- No -platform: -- Laptops and Desktops -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -badges: -- trending -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why this is important?** - - Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, consumers will expect both performance and battery efficiency across all WoA applications, such as the ArduinoIDE. - - **Project summary** - - This project focuses on **porting and optimising the Arduino IDE**—an essential open-source platform for embedded development to run natively and efficiently on Windows on Arm platforms. 
In addition, the project tackles a key dependency, `lzma-native`, a compression library used by the IDE, which currently [**lacks support for Windows on Arm**](https://github.com/addaleax/lzma-native/issues/132). Previous attempts to build `lzma-native` on WoA failed due to architecture-specific compilation issues and native module bindings (`node-gyp`, `liblzma`, etc.). - - ### Key Objectives: - - Successfully build and run the [Arduino IDE](https://github.com/arduino/arduino-ide) on Windows on Arm. - - Patch or fork [`lzma-native`](https://github.com/addaleax/lzma-native) to enable full compatibility on WoA. - - Benchmark IDE performance and memory usage on Arm64 vs. x64 emulation. - - Submit upstream patches and document issues to support long-term ecosystem health. - - This project aligns strongly with Arm’s mission to expand native software compatibility on Arm-based Windows devices. It provides students with a **deep dive into cross-platform development, native module compilation, and Arm architecture optimization**, making it ideal for CV building, community contribution, and real-world system-level experience. - - ## Prerequisites - - - - Familiarity with JavaScript (Node.js), TypeScript and C++ (lzma-native) - - Familiarity or willing to learn `CMake`, `Ninja`, `Visual Studio with C++ Desktop Dev`, UTM - - Basic understanding of terminal programs such as `Windows Terminal`, `PowerShell` and `WSL2` - - Access to a physical Windows on Arm device or a [WoA Virtual Machine running through UTM](https://mac.getutm.app/gallery/windows-11-arm). See the [Linaro Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) for more information.
- - - ## Resources from Arm and our partners - - - Repository: [Arduino IDE GitHub repo](https://github.com/arduino/arduino-ide) - - Repository: [lzma-native GitHub repo](https://github.com/addaleax/lzma-native) - - External Documentation: [Issue #132 – lzma-native Windows Arm64 build failure](https://github.com/addaleax/lzma-native/issues/132) - - Documentation: Arm’s official [Learn on Arm](https://learn.arm.com/) platform - - External Documentation: [Windows on Arm Environments – Linaro wiki](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) - - Documentation: [Node.js native addon guides](https://nodejs.org/api/addons.html) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---- diff --git a/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md b/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md index 66ef2921..7f1c8da0 100644 --- a/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md +++ b/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md @@ -1,79 +1,3 @@ ---- -title: Bioinformatic-Pipeline-Analysis -description: This self-service project benchmarks Arm64 Bioconda packages in real nf-core workflows—measuring performance, diagnosing build failures, and proposing fixes that accelerate truly native bioinformatics on the expanding fleet of Arm-powered machines. -subjects: -- Performance and Architecture -- Databases -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ### Description - - **Why this is important?** - - Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops running Linux and MacOS. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). While many leading Bioconda packages now support Linux/Arm, there still emulated components that can be the bottleneck. - - **Project summary** - - This project aims to benchmark specific Bioconda packages that have been built for Arm64 using the nf-core-arm-discovery repository. 
The participant will utilize public genomic datasets from databases such as NCBI, select appropriate datasets, and execute bioinformatics workflows on Arm-based infrastructure. The candidate will evaluate the performance, compatibility, and efficiency of these packages, document errors and failures, and investigate the reasons behind package build failures. The final deliverable will be a detailed report with performance metrics, identified issues, and recommended improvements to enhance package support on Arm64. - - The deliverables of the project are as follows: - - - Selection and justification of public genomic datasets. - - Execution of bioinformatics workflows using Bioconda packages on Arm64. - - Performance benchmarking and comparison with x86 architectures. - - Documentation of failed package builds and proposed fixes. - - Comprehensive report with results, analysis, and recommendations. - - - ## Prerequisites - - - Intermediate understanding of Python, Bash and Nextflow - - Basic experience with nf-core pipelines, Conda, Docker/Singularity, Snakemake - - Access to Arm64-based cloud instances (e.g., AWS Graviton) with plenty of memory and storage - - IP access to Public genomic databases (NCBI, ENA, etc.) - - ## Resources from Arm and our partners - - - External Documentation: [nf-core documentation](https://nf-co.re/docs/) - - - External Documentation: [AWS Graviton documentation](https://aws.amazon.com/ec2/graviton/) - - - Repository: [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) - - - Repository: [Bioconda package repository](https://bioconda.github.io/) - - - Dataset: [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program.
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- ### Description **Why this is important?** diff --git a/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md b/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md index 316c87a8..b61ea44b 100644 --- a/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md +++ b/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md @@ -1,68 +1,3 @@ ---- -title: Compliance-Ready-Smart-Camera-System -description: This challenge will create and validate an Arm-based, smart camera pipeline on virtual automotive hardware—advancing safer, more developer-friendly driver-monitoring solutions for next-generation vehicles. -subjects: -- Security -- Embedded Linux -- ML -- Virtual Hardware -requires-team: -- Yes -platform: -- Mobile, Graphics, and Gaming -- Automotive -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -- Direct Support from Arm -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. 
- - - ## Description - - **Why this is important?** - - As of June 2025, [94% of global automakers](https://newsroom.arm.com/blog/arm-zena-css-ai-defined-vehicle-compute-platform) use Arm vehicle technology. The development of compliance-ready smart camera systems is crucial for Arm as it aligns with the increasing demand for safety and reliability in automotive platforms, see the [Arm Zena compute subsystem(CSS)](https://www.arm.com/products/automotive/compute-subsystems/zena) for more details. - - **Project summary** - - Government and external regulations, particularly in automotive industries, are shaping how smart camera products are designed, implemented, and verified. This project will explore how compliance with standards like ISO 26262 influences the design of smart camera systems, such as Driver Monitoring Systems (DMS) to ensure driver attentiveness. Students or engineers will collaborate with industry-relevant platforms, such as Arm's Kronos Fixed Virtual Platform (FVP) for automotive development, to understand and propose frictionless developer experiences aligned with functional safety standards. 
- - Deliverables include: - - Developing a full end-to-end smart camera system that can be incorporated into a functional safety environment (such as automotive or medical), following the relevant standards for development (e.g., ISO 26262) - - A survey of regulatory requirements and their impact on smart camera design - - An architectural analysis integrating Arm-based systems into a compliant automotive software stack - - Recommendations for enhancing developer tools and reference software stacks to align with ISO standards - - ## Estimated Project Duration - - Estimated Time: 6+ months - - Participants: Team of 2+ - - ## Resources from Arm and Arm partners - - Learning Paths - [Automotive Development](https://www.arm.com/resources/learning-paths/automotive) - - Software - Arm Automotive Reference Platforms (e.g., [Kronos FVP](https://arm-auto-solutions.docs.arm.com/en/v1.0/overview.html)) - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. 
diff --git a/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md b/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md index 4890eb01..f7ef7cc6 100644 --- a/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md +++ b/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md @@ -1,54 +1,3 @@ ---- -title: FPGA-Accellerator-with-DDR -description: This self-service project takes Arm Corstone-1000 from FPGA to silicon, delivering a DDR-backed, Linux-ready SoC platform that lets researchers plug in and evaluate custom accelerators with real-world performance. -subjects: -- Virtual Hardware -- Performance and Architecture -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -sw-hw: -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - This project aims to leverage the Corstone-1000 platform to host and support a custom research accelerator. The main deliverables include prototyping the accelerator in FPGA and then creating a physical silicon implementation using a pre-verified programmable control system. The project will provide practical experience in SoC design, FPGA prototyping, and hardware acceleration. The final output will be a functional SoC FPGA prototyping platform with DDR memory, capable of running Linux and demonstrating the feasibility and performance of the design. **To undertake this project, please reach out to SoC Labs** in [this link](https://soclabs.org/). 
- - - ## Prerequisites - - - Languages: Verilog, SystemVerilog - - Tooling: Vivado, ModelSim, ASIC design tools - - Hardware: FPGA development board (e.g., Xilinx or Altera), Corstone-1000 platform - - IP access: Arm Academic Access member (link to get if they don't have it) - - ## Resources from Arm and our partners - - - External Community: [SoC Labs, community for Arm-based software development](https://soclabs.org/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- ## Description This project aims to leverage the Corstone-1000 platform to host and support a custom research accelerator. The main deliverables include prototyping the accelerator in FPGA and then creating a physical silicon implementation using a pre-verified programmable control system. The project will provide practical experience in SoC design, FPGA prototyping, and hardware acceleration. The final output will be a functional SoC FPGA prototyping platform with DDR memory, capable of running Linux and demonstrating the feasibility and performance of the design. **To undertake this project, please reach out to SoC Labs** in [this link](https://soclabs.org/).
- - diff --git a/docs/_posts/2025-05-30-HPC-Algorithm.md b/docs/_posts/2025-05-30-HPC-Algorithm.md index 3445c23e..49898660 100644 --- a/docs/_posts/2025-05-30-HPC-Algorithm.md +++ b/docs/_posts/2025-05-30-HPC-Algorithm.md @@ -1,64 +1,3 @@ ---- -title: HPC-Algorithm -description: This self-service project is around finding an HPC algorithm and accelerating it with Arm’s SVE/SVE2 vectorization—demonstrating how next-generation Arm hardware can deliver significant, scalable performance gains. -subjects: -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -badges: trending -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - **Why this is important?** - - Scalable Vector Extension (SVE) is a vector extension to the A64 instruction set of the Armv8-A architecture. Armv9-A builds on SVE with the SVE2 extension. Unlike other single-instruction multiple data (SIMD) architectures, SVE and SVE2 do not define the size of the vector registers, but constrain it to a range of possible values, from a minimum of 128 bits up to a maximum of 2048 bits in 128-bit wide units. Therefore, any CPU vendor can implement the extension by choosing the vector register size that better suits the workloads the CPU is targeting. There is growing availability of SVE-enabled hardware, such as through cloud service providers. However, not all software has taken advantage of this feature. As such, there are potential performance improvements available to software libraries and applications that add support for SVE/SVE2. - - **Project summary** - - This project aims to identify and optimize the performance of an algorithm used in high-performance computing (HPC) by leveraging Scalable Vector Extensions (SVE) instructions.
The main deliverable is an optimized version of the chosen algorithm that demonstrates a performance improvement using SVE. This project will provide practical experience in HPC, vectorization, and performance optimization. The final output will be a detailed report and a functional implementation of the optimized algorithm. - - ## Prerequisites - - - Intermediate understanding of C, C++ or Fortran. - - Experience with high performance compute (HPC). - - Basic understanding of compilers such as Arm Compiler for HPC, or an autovectorising compiler such as GCC. - - Access to Arm-based servers or SVE-enabled hardware - - ## Resources from Arm and our partners - - - Learning path: [Port Code to SVE](https://learn.arm.com/learning-paths/servers-and-cloud-computing/sve/) - - Learning path: [Migrate applications that use performance libraries](https://learn.arm.com/learning-paths/servers-and-cloud-computing/using-and-porting-performance-libs/) - - Documentation: [SVE Programmers Guide](https://developer.arm.com/documentation/102476/0101/Programming-with-SVE) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
---- ## Description **Why this is important?** diff --git a/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md b/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md index 60ed1e4a..c1df1457 100644 --- a/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md +++ b/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md @@ -1,79 +1,3 @@ ---- -title: Haskell-Compiler-Windows-on-Arm -description: This self-service project brings native Glasgow Haskell Compiler support to Windows on Arm—unlocking efficient Arm-laptop builds, extending Haskell’s reach, and giving contributors hands-on experience with Arm64 code generation and runtime integration. -subjects: -- Migration to Arm -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why this is important?** - - The Glasgow Haskell Compiler (GHC) is the de facto standard compiler for Haskell, an advanced purely functional programming language with strong type inference and lazy evaluation. This project aims to **port GHC to support Windows on Arm (WoA)**, a platform that is increasingly relevant with the rise of Arm-powered laptops and developer kits. Arm anticipates more original equipment manufacturers (OEMs) to be available in the coming years. - - - **Project summary** - - Currently, GHC lacks robust support for WoA, hindering Haskell’s reach in energy-efficient and mobile-native environments (Request for support has [previously been requested by the community](https://gitlab.haskell.org/ghc/ghc/-/issues/24603)). The goal is to bridge this gap by: - - Enabling native compilation of Haskell code via GHC on WoA. 
- - Implementing and testing architecture-specific assembly and intrinsic functions. - - Extending the GHC build system to recognize WoA environments. - - Integrating and validating linker and runtime support on Arm-based Windows systems. - - The project requires in-depth familiarity with compiler backends, calling conventions, code generation pipelines, and the use of LLVM or native code generators. Students will also gain experience in cross-compilation, Windows PE/COFF linking, and performance benchmarking on Arm CPUs. - - The work has potential for real-world deployment and academic publishing, and would be of high value to the Haskell and Arm developer ecosystems. - - --- - - ## Prerequisites - - - Advanced understanding of Haskell (including Template Haskell, Core-to-STG pipeline understanding) - - Arm64 Windows device or access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) - - Intermediate understanding of Arm64 Assembly (AArch64) - - Comfortable using compilers such as LLVM and Clang for backend work (if using LLVM codegen) - - Access to MSYS2 / CMake / Ninja for Windows builds - - - ## Resources from Arm and our partners - - - External Documentation: [GHC Development Wiki](https://gitlab.haskell.org/ghc/ghc/-/wikis/) - - Repository: [GHC source tree](https://gitlab.haskell.org/ghc/ghc) - - External Documentation: [Linaro WoA Support Documentation](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges.
These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- diff --git a/docs/_posts/2025-05-30-Human-Centric-Robotics.md b/docs/_posts/2025-05-30-Human-Centric-Robotics.md index c06979f0..5e005ce5 100644 --- a/docs/_posts/2025-05-30-Human-Centric-Robotics.md +++ b/docs/_posts/2025-05-30-Human-Centric-Robotics.md @@ -1,75 +1,3 @@ ---- -title: Human-Centric-Robotics -description: This team project will build and test an Arm-based urban service robot—merging real-time navigation, vision-guided manipulation, and human interaction—and model its socioeconomic impact to show how Arm platforms can transform last-mile delivery, eldercare, or other city services. -subjects: -- ML -- Embedded Linux -- RTOS Fundamentals -requires-team: -- Yes -platform: -- Automotive -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -- Direct Support from Arm -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. - - ## Description - - **Why this is important?** - - Arm is transitioning from traditional IP to providing platforms for the AI era, please see a recent [news post](https://newsroom.arm.com/news/new-arm-product-naming-architecture) for more details. This project is crucial for Arm as it showcases the versatility of Arm-based platforms in real-world applications, enhancing their relevance in the robotics sector. 
Furthermore, the insights gained from this project can inform future developments and partnerships and share future platforms related to robotics. - - **Project Summary** - - This project challenges students to design, build, and evaluate a human-centric robotic system for urban deployment using Arm-based compute platforms such as Raspberry Pi 5, NVIDIA Jetson Orin Nano, etc. The primary focus is on deploying prototypes in a controlled campus environment for applications like last-mile delivery, eldercare assistance, or smart waste collection. - - - Participants will integrate real-time navigation, object manipulation, and human-interaction modules using state-of-the-art computer vision and sensor fusion frameworks. The second phase involves simulating or evaluating the robot’s impact on urban workflows and labor markets, including surveys or socioeconomic modeling techniques (e.g., system dynamics or agent-based simulation). - - Potential Deliverables include: - - A working prototype running on an Arm-based platform - - Software stack (navigation, ML inference, interaction logic) - - Field evaluation results & UX data (e.g., survey or usage logs) - - Report of development process and considerations when prototyping an end-user product. - - A socioeconomic impact report using modeling or simulation techniques - - *Note: Arm does not offer direct channels to municipalities or public testing environments. Projects should focus on campus deployments, simulated environments (e.g., Gazebo).* - - ## Estimated Project Duration - - 6+ months - Team size: 2+ participants - - ## Prerequisites - - - **Languages**: Familiarity with an OOP language. - - **Hardware**: - - **IP/Cloud Access**: - - Any cloud service provider with Arm-based instances (for model training or data analysis) - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. ## Description diff --git a/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md b/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md index 7c87bffe..d4081274 100644 --- a/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md +++ b/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md @@ -1,56 +1,3 @@ ---- -title: LLM-Benchmark-on-Arm-Server -description: This self-service project sets up a reproducible MLPerf Inference workflow to benchmark large-language-model performance across Arm server configurations—yielding hard data that guides optimization of Arm hardware and software stacks for AI workloads. -subjects: -- ML -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - This project aims to benchmark inference on Arm-based servers using the MLPerf Inference benchmark suite. The project spans performance analysis across different configurations of Arm-based servers. The main deliverable is a comprehensive benchmarking setup that can evaluate the performance of large language models (LLMs) on various Arm server configurations in addition to a report highlighting the performance difference and how to recreate the results. 
This project will provide practical experience in benchmarking, performance analysis, and working with Arm-based server architectures. The final output will be a detailed report and a functional benchmarking infrastructure that can be used for further research and development. - - - ## Prequisites - - - Intermediate understanding of Python and C++ - - Intemediate understanding of ML frameworks such as MLPerf, TensorFlow and PyTorch - - Access to physcial Arm-based server or access to cloud service providers - - ## Resources from Arm and our partners - - - Repository: [MLPerf Inference ](https://github.com/mlcommons/inference) - - External Documentation: [MLPerf Inference Benchmark Suite](https://mlcommons.org/en/inference-datacenter-20/) - - Blog: [Arm Server inference performance](https://community.arm.com/arm-community-blogs/b/servers-and-cloud-computing-blog/posts/machine-learning-inference-on-aws-graviton3) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- ## Description This project aims to benchmark inference on Arm-based servers using the MLPerf Inference benchmark suite. The project spans performance analysis across different configurations of Arm-based servers. 
The main deliverable is a comprehensive benchmarking setup that can evaluate the performance of large language models (LLMs) on various Arm server configurations in addition to a report highlighting the performance difference and how to recreate the results. This project will provide practical experience in benchmarking, performance analysis, and working with Arm-based server architectures. The final output will be a detailed report and a functional benchmarking infrastructure that can be used for further research and development. diff --git a/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md b/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md index 5aedb7ef..a2a47c22 100644 --- a/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md +++ b/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md @@ -1,72 +1,3 @@ ---- -title: Machine-Learning-on-AWS-Graviton -description: This self-service project ports and tunes OpenSora text-to-video transformers on AWS Graviton CPUs—showcasing cost-efficient, quantized, CPU-only inference pipelines and guiding best-practice optimization for Arm-based cloud AI workloads. -subjects: -- ML -- Migration to Arm -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Description - - **Why is this important?** - - This project investigates the deployment and optimization of text-to-video transformer models on Arm-based instances, leveraging CPU-only execution for cost-effective and scalable inference. Vision Transformers, though typically run on GPUs, are increasingly desire to operate in resource-constrained environments for power efficiency. 
- - - **Project Summary** - - The aim of this project is to port, benchmark, and optimize a pre-trained ViT model (e.g., OpenSora) on Arm-based instances. This could include post-training quantization and investigation in how to speed up performance. Students will explore efficiency techniques such as INT8 quantization, refactoring of expensive operations, and memory-efficient transformer kernels, and compare results across GPU and CPU platforms. Deliverables include a reproducable inference pipeline and a technical report outlining bottlenecks and optimization strategies. - - ## Prequisites - - - Intemediate understanding of Python. - - Understanding of transformer architectures, vision transformer architectures and inference optimization - - Experience using PyTorch or ONNX Runtime (CPU execution provider) - - Experience with libraries such as Hugging Face Transformers, torchvision - - Access to Arm-based instances such as AWS Graviton3/Graviton4 (`c7g`, `m7g`, or `r7g`) - - Familiarity with Linux, Docker, and cloud environments - - - ## Resources from Arm and our partners - - - - Learning Paths: [Arm AI Learning Paths](https://learn.arm.com/tag/ml) - - Repository: [AWS Machine Learning Guide](https://github.com/aws/aws-graviton-getting-started/tree/main/machinelearning) - - Blog: [AWS SageMaker](https://aws.amazon.com/blogs/machine-learning/run-machine-learning-inference-workloads-on-aws-graviton-based-instances-with-amazon-sagemaker/) - - External Documentation: [OpenSora Documentation](https://github.com/hpcaitech/Open-Sora) - - Repository: [GGML library](https://github.com/ggml-org/ggml) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). 
- - ## Benefits - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- ## Description diff --git a/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md b/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md index 9cc92a5b..7e0d3237 100644 --- a/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md +++ b/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md @@ -1,78 +1,3 @@ ---- -title: Processor-in-the-Loop-Automotive -description: Verify a Simulink automotive controller by running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor. -subjects: -- Embedded Linux -- RTOS Fundamentals -- Virtual Hardware -requires-team: -- No -platform: -- Laptops and Desktops -- Automotive -- Embedded and Microcontrollers -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why this is important** - - Modern automotive software development requires early verification of embedded software to meet performance and safety standards. Engineers must validate control algorithms long before physical ECUs are available. Processor in the Loop (PIL) verification bridges this gap by executing auto-generated embedded C code on a virtual processor while checking for functional equivalence and timing compliance. 
This challenge mirrors the real-world V-Model verification process used by OEMs and Tier 1 suppliers, offering hands-on experience in model-based design, embedded code generation, and in-the-loop testing on a virtual Arm Cortex M7 core.Modern vehicles demand rigorous, early-stage verification of embedded software to satisfy safety, performance and homologation requirements. Waiting for physical ECUs delays feedback and drives up cost; virtual processor testing closes this gap. - - **Project summary** - - Start with a prebuilt Simulink automotive control model and drive it through a complete model-based development and verification workflow. This includes defining detailed software requirements, designing test scenarios, generating C code from the controller subsystem, running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor, analyzing execution time, and publishing a complete verification report. - - ## Prequisites - - - [MATLAB & Simulink License](https://uk.mathworks.com/pricing-licensing.html?prodcode=ML&intendeduse=edu) - - Familiarity with C/C++, Simulink, Stateflow and Embedded Coder - - Familiarity with Processor-in-the-Loop (PIL), Code Profile Analyzer - - Understanding of automotive software development such as V-Model lifecycle methodology. 
- - - ## Resources from Arm and our partners - - - Built-in Simulink Automotive Example: [Automatic Climate Control](https://www.mathworks.com/help/simulink/slref/simulating-automatic-climate-control-systems.html) - - Built-in Simulink Automotive Example: [Tire Pressure Monitoring System (TPMS)]( https://www.mathworks.com/help/simulink/ug/wirelesss-tire-pressure-monitoring-system-with-fault-logging.html) - - Built-in Simulink Automotive Example: [Anti-Lock Braking System (ABS)]( https://www.mathworks.com/help/simulink/slref/modeling-an-anti-lock-braking-system.html) - - Define Requirements: [Requirements Toolbox™](https://www.mathworks.com/products/requirements-toolbox.html) - - Code Generation: [MathWorks Embedded Coder®](https://uk.mathworks.com/products/embedded-coder.html) - - Model-in-the-Loop Test: [Simulink Test™](https://www.mathworks.com/help/sltest/index.html?s_tid=CRUX_lftnav) - - Measure Code Coverage: [Simulink Coverage™](https://www.mathworks.com/help/slcoverage/index.html) - - Hardware Implementation: [Arm Cortex M7 (Fast Model)](https://developer.arm.com/Tools%20and%20Software/Fast%20Models), [Embedded Coder Support Package for Arm Cortex M Fast Models]( https://www.mathworks.com/hardware-support/arm-cortex-m.html) - - Conduct Execution Profiling: [Code Profile Analyzer](https://www.mathworks.com/help/ecoder/ref/codeprofileanalyzer-app.html) - - Perform Static Code Analysis: [Polyspace®](https://www.mathworks.com/products/polyspace.html) - - Documentation: [MATLAB and Simulink for Verification and Validation](https://www.mathworks.com/solutions/verification-validation.html) - - Documentation: [ARM Cortex-M Support from Embedded Coder]( https://www.mathworks.com/hardware-support/arm-cortex-m.html) - - Documentation: [Arm Fast Models](https://uk.mathworks.com/products/connections/product_detail/arm-fast-models.html) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm 
Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- diff --git a/docs/_posts/2025-05-30-Quantisation-Aware-Training.md b/docs/_posts/2025-05-30-Quantisation-Aware-Training.md index 66cfd281..ca68956a 100644 --- a/docs/_posts/2025-05-30-Quantisation-Aware-Training.md +++ b/docs/_posts/2025-05-30-Quantisation-Aware-Training.md @@ -1,70 +1,3 @@ ---- -title: Quantisation-Aware-Training -description: This self-service project applies PyTorch quantization-aware training to compress and accelerate vision models for Arm-powered Android devices—enabling real-time, on-device AI while sharing the resulting lightweight models with the Hugging Face community. -subjects: -- ML -- Performance and Architecture -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - This open-ended project invites students to explore **Quantization-Aware Training (QAT)** with PyTorch to optimize computer vision models for **Arm-based mobile devices** (e.g., Android smartphones). 
- - The project centers on training a model using a **non-restrictively licensed dataset** and deploying it either on **Arm-powered mobile devices** (leveraging Android Neural Networks API ) - - Students will apply QAT to maintain accuracy while reducing model size and inference latency, making it suitable for real-time applications like: - - Sign language recognition for accessibility. - - Visual anomaly detection in manufacturing. - - Personal health and activity monitoring from camera feeds. - - The project encourages referencing work by contributing **optimized and quantized models for Arm platforms** on HuggingFace. The final quantized model will be **uploaded to HuggingFace** and may be submitted for listing in the [Arm on HuggingFace space](https://huggingface.co/Arm), encouraging open, community-supported contributions. - - ## Prequisites - - - **Languages**: Familiar with Python, pytorch and Java/Kotlin (if Android). - - **Frameworks**: Intermediate understanding of PyTorch - - **Tooling**: PyTorch Lightning, Android Studio - - **Hardware Options**: - - Android phone with Arm Cortex-A CPU or simulator through Android Studio. - - **Deployment Targets**: - - Android - - ## Resources from Arm and our partners - - - Documentation: [Quantization in PyTorch](https://pytorch.org/docs/stable/quantization.html) - - Blog: [Google Media Pipe](https://android-developers.googleblog.com/2024/10/bring-your-ai-model-to-android-devices.html) - - Datasets: [Hugging Face Datasets](https://huggingface.co/docs/datasets/en/index) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). 
- - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- ## Description This open-ended project invites students to explore **Quantization-Aware Training (QAT)** with PyTorch to optimize computer vision models for **Arm-based mobile devices** (e.g., Android smartphones). diff --git a/docs/_posts/2025-05-30-R-Arm-Community-Support.md b/docs/_posts/2025-05-30-R-Arm-Community-Support.md index 3061387b..cab3563e 100644 --- a/docs/_posts/2025-05-30-R-Arm-Community-Support.md +++ b/docs/_posts/2025-05-30-R-Arm-Community-Support.md @@ -1,86 +1,3 @@ ---- -title: R-Arm-Community-Support -description: This self-service project boosts the R ecosystem on Windows on Arm by identifying unsupported packages, upstreaming fixes, and automating builds—so data scientists can run their workflows natively on fast, efficient Arm64 laptops and desktops. -subjects: -- Performance and Architecture -- Migration to Arm -- Libraries -requires-team: -- No -platform: -- Laptops and Desktops -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why this is important?** - - Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. 
As such, developers will expect packages to be available for WoA so that downstream applications can more easily build for WoA platforms. - - **Project summary** - - - This project aims to significantly enhance the support for running **R packages on Windows 11 for Arm64 (WoA)** by identifying and contributing bug fixes / improvements to the relevant parts of the R community (e.g., CRAN/Bioconductor packages or even R Core / Rtools etc.). The project’s goals include: - - - - **Identifying CRAN and Bioconductor packages** lacking Windows/Arm64 support. - - **Proposing and testing patches upstream** for R packages that fail to build or run on WoA. - - **Engaging with the R development community** via the Windows Special interest group, [R-SIG-windows](https://stat.ethz.ch/mailman/listinfo/r-sig-windows), [R-Package-Devel](https://stat.ethz.ch/mailman/listinfo/r-package-devel) mailing list or the informal [R Contributors Slack](https://contributor.r-project.org/slack) and following the [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) to submit high-quality patches. - - **Reporting new issues**, requesting comments on proposed patches, documenting process via Bugzilla and reviewing existing issues through the [bug tracker](https://bugs.r-project.org/) - - **Tracking CI coverage** for recently announced public preview of [Windows11-Arm64 GitHub-hosted runners](https://github.blog/changelog/2025-04-14-windows-arm64-hosted-runners-now-available-in-public-preview/) and potentially proposing GitHub Actions or GitLab CI templates to automate WoA builds. - - Stretch Objectives: - - - **Identifying, Analyzing and fixing compatibility issues** in base R and Rtools for the Windows/Arm64 environment. This may involve waiting for improved upstream support from GCC for Windows-AArch64. A summary and progress is [available here](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/28802842658/MinGW+GNU+Toolchain). 
- - The deliverables include: - - - Patches, request for comments and bug reports the highest impact packages - - A curated list of packages with proposed WoA support status - - A short technical write-up describing the contributions and challenges - - ## Prequisites - - - Intermediate understanding of the R language - - Intermediate understanding of Rtools, Git and Docker for cross-compilation. - - Basic understanding or willingness to learn Bugzilla, Windows 11 operating system and GitHub CI/CD. - - Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments). - - ## Resources from Arm and our partners - - - Documentation: [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) - - Documentation: [R Bugzilla](https://bugs.r-project.org/) - - Documentation: [Rtools for Windows](https://cran.r-project.org/bin/windows/Rtools/) - - Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/) - - Documentation: Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors , who are part of the Arm Developer program and the R community. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- diff --git a/docs/_posts/2025-05-30-Real-Time-Image-Classification.md b/docs/_posts/2025-05-30-Real-Time-Image-Classification.md index 19478c90..3bd0dbdb 100644 --- a/docs/_posts/2025-05-30-Real-Time-Image-Classification.md +++ b/docs/_posts/2025-05-30-Real-Time-Image-Classification.md @@ -1,66 +1,3 @@ ---- -title: Real-Time-Image-Classification -description: This self-service project trains, quantizes, and CMSIS-NN-deploys a CNN to achieve real-time image classification on an Arm Cortex-M board—demonstrating low-power, edge-ready AI on microcontrollers. -subjects: -- ML -- Performance and Architecture -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - This project aims to develop a real-time image classification system using Convolutional Neural Networks (CNN) on an Arm Cortex-M microcontroller with CMSIS-NN. The main deliverables include training a CNN model on a custom dataset, quantizing the model to deploy it on resource-constrained devices, and transforming the model into a C format to compile and run on the microcontroller. The project will provide practical experience in running AI models on edge devices and optimizing AI models for efficient performance. The final output will be a functional image classification system capable of real-time processing on a microcontroller. - - - ## Prequisites - - - Languages: Python, ML framework experience (TensorFlowLite for microcontroller or Pytorch / Executorch), Embedded programming in C. 
- - Tooling: - - TensorFlow Lite - - CMSIS-NN - - Keil MDK - - Hardware: - - Arm Cortex-M based microcontroller development board and compatible camera module. - - Access to hardware suitable for training neural networks - - ## Resources from Arm and our partners - - - Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) - - Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) - - Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) - - Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---- diff --git a/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md b/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md index f356ec62..74693b55 100644 --- a/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md +++ b/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md @@ -1,88 +1,3 @@ ---- -title: Responsible-AI-and-Yellow-Teaming -description: This self-service project equips teams with a YellowTeamGPT workflow that probes Arm-based AI products for unintended impacts—turning responsible-AI stress-testing into a core step of the development cycle. -subjects: -- ML -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - - **Why this is important?** - - AI products are becoming more capable by the day. But unless we think carefully about how we build AI systems, we risk amplifying harm as fast as we’re scaling performance. Even AI products that seem successful in narrow metrics like `number of users` and `engagement` can lead to a degradation of user trust in your company, reputational risk, and drive negative societal outcomes. A more systematic product development approach is necessary for this next generation of tech to ensure we get the benefits of AI without the downsides. - - **Project summary** - - This project introduces "Yellow Teaming", a structured methodology for stress-testing AI products by exploring the full spectrum of consequences when products succeed, not just fail. Students will create a YellowTeamGPT to analyze their Arm-based product concept and apply the learnings to make their product better. 
This exercise is an excellent way for software developers, product managers, and designers to elevate their products through thoughtful design choices above a crowded competitive landscape. - - The assistant will be integrated into your product development workflow (e.g. product brainstorming, feature planning reviews, coding sessions) to aid software teams in surfacing unintended effects of new product features on your company, your users, and society. Participants can implement their YellowTeamGPT using any LLM, from a private Llama3.1-8B model on an AWS instance (tutorial linked below) to a public ChatGPT/Claude/other chatbot. Participants can also Yellow Team without an LLM by applying the methodology themselves and documenting their analysis. Analysis can be documented as product design documents, sprint retrospectives, Git-based code reviews...anything that shows the results of their thoughtful design practices. - - Key Objectives of Your Project - - Collect Real-World Applications: Gather detailed accounts of AI projects developed using Yellow Teaming principles on Arm-based systems. - - Showcase Responsible AI Practices: Highlight how developers anticipate and address potential societal and ethical impacts of their AI solutions. - - Promote Arm-Based AI Development: Demonstrate the capabilities and advantages of deploying AI applications on Arm architectures, such as AWS Graviton processors or smartphones. - - Yellow Teaming Implementation: A detailed account of how Yellow Teaming was applied, including the tools/prompts used to facilitate analysis, identification of unintended consequences, strategies to mitigate negative product impacts, and/or new net-positive features ideated. - - - ## Prequisites - - If deploying a private Llama model -> - - **Hardware**: - - Access to an Arm-based cloud instance, for example Arm-based Graviton4 processors. 
- - **Software**: - - PyTorch and Hugging Face account - - `torchchat` repo and dependencies - - Hugging Face CLI for LLM download - - Git, Python 3.10+, and various common build essentials (e.g., `make`, `g++`) - - **Skills**: - - Proficiency in Python and PyTorch - - [Hugging Face account](https://huggingface.co/) - - Understanding of LLMs and prompting techniques - - If using a public LLM -> - - **Hardware**: - - None needed - - **Software**: - - Access to a public LLM - - **Skills**: - - Understanding of LLMs and prompting techniques - - ## Resources from Arm and our partners - - - External Course: [Mitigating Harmful Consequences course module by the Center for Humane Technology](https://www.humanetech.com/course) - - Blog: [Build Responsible AI products with your own Yellow Teaming LLM](https://pytorch.org/blog/build-responsible-ai-products-with-your-own-yellow-teaming-llm/) - - Learning Paths: [AI Learning Paths](https://learn.arm.com/tag/ml) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---- ## Description diff --git a/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md b/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md index 1f910af8..ed06aac1 100644 --- a/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md +++ b/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md @@ -1,60 +1,3 @@ ---- -title: Sentiment-Analysis-Dashboard -description: This self-service project builds a web-scraping, LLM-powered dashboard that tracks and visualizes sentiment trends across semiconductor-industry news, giving stakeholders a real-time pulse on market mood and emerging themes. -subjects: -- ML -- Web -- Databases -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - This project aims to develop a sentiment analysis dashboard for keywords related to the semiconductor industry. The main deliverable is a web scraping script that gathers text data from various semiconductor news sites. Example sites are [SemiconductorEngineering.com](https://semiengineering.com/), [IEEE Spectrum](https://spectrum.ieee.org/), [EETimes](https://www.eetimes.com/tag/semiconductors/), [SemiconductorDigest](https://www.semiconductor-digest.com/), [SemiconductorToday](https://semiconductor-today.com/), [Financial Times - Semiconductors](https://www.ft.com/semiconductors), [Bezinga Semiconductors](https://www.benzinga.com/topic/semiconductors). - - This data will then be processed through a sentiment analysis LLM (Large Language Model) to determine the sentiment of the content and how it varies over time. The project will provide practical experience in web scraping, data processing, databases and using LLMs for sentiment analysis. 
The final output will be a functional dashboard that displays the sentiment analysis results in an easy-to-understand format. - - ## Prequisites - - - Languages: Intermediate understanding of Python - - Hardware: Access to a computer with internet connectivity and access to cloud instances - - ## Resources from Arm and our partners - - You are free to choose your own implementation details. The resouces below are examples to get started. - - - External Documentation: [BeautifulSoup](https://pypi.org/project/beautifulsoup4/) - - Learning Paths: [Deploy MariaDB](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- ## Description This project aims to develop a sentiment analysis dashboard for keywords related to the semiconductor industry. The main deliverable is a web scraping script that gathers text data from various semiconductor news sites. 
Example sites are [SemiconductorEngineering.com](https://semiengineering.com/), [IEEE Spectrum](https://spectrum.ieee.org/), [EETimes](https://www.eetimes.com/tag/semiconductors/), [SemiconductorDigest](https://www.semiconductor-digest.com/), [SemiconductorToday](https://semiconductor-today.com/), [Financial Times - Semiconductors](https://www.ft.com/semiconductors), [Bezinga Semiconductors](https://www.benzinga.com/topic/semiconductors). diff --git a/docs/_posts/2025-05-30-Smart-Voice-Assistant.md b/docs/_posts/2025-05-30-Smart-Voice-Assistant.md index a3394f30..2c0b8567 100644 --- a/docs/_posts/2025-05-30-Smart-Voice-Assistant.md +++ b/docs/_posts/2025-05-30-Smart-Voice-Assistant.md @@ -1,63 +1,3 @@ ---- -title: Smart-Voice-Assistant -description: This project trains and deploys a TinyML keyword-spotting model on an Arm Cortex-M55/U55 board to create a low-power voice assistant that recognizes spoken commands and quantifies its accuracy, latency, and energy use. -subjects: -- ML -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - This project aims to develop a simple voice assistant that can recognize spoken commands such as “turn on the light,” “play music,” and other similar tasks. The voice assistant will be able to control peripheral devices accordingly. The main objective is to implement your design on a low-power microcontroller Cortex-M55/U55 to create low-level machine learning applications. You should look to access metrics such as the accuracy, power and computation time. Please refer our [Machine Learning keyword spotting example](https://github.com/Arm-Examples/mlek-cmsis-pack-examples) as a reference. 
- - The deliverables include a functional voice assistant capable of understanding and executing basic commands, along with documentation detailing the development process and the performance of the system. - - ## Prequisites - - - Languages: Python, C++, Embedded C - - Tooling: TensorFlow Lite for Microcontrollers, Keil MDK - - Hardware: Cortex-M55/U55 development board (or Corstone Virtual Platform), microphone, peripheral devices (e.g., lights, speakers) - - - ## Resources from Arm and our partners - - - Learning paths: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) - - Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) - - Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) - - Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) - - Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---- diff --git a/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md b/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md index 01f66357..1a5413cf 100644 --- a/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md +++ b/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md @@ -1,80 +1,3 @@ ---- -title: SpecINT2017-benchmarking-on-Arm64 -description: This self-service project profiles SPEC CPU2017 on Arm64 servers—using GCC, Clang, and Arm Compiler with top-down analysis—to reveal how compiler choices and Arm micro-architectural features impact execution time, energy efficiency, and performance bottlenecks. -subjects: -- Performance and Architecture -- Migration to Arm -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ### Description - - **Why this is important?** - - SPEC is an industry standard for assessing the performance of both single-threaded and multi-threaded applications across various data types and compilers. This synthetic workload accurately represents real-world tasks and serves as a common metric for evaluating platform choices. Therefore, it is essential to comprehend the inner workings of these benchmarks to identify which microarchitectural features can enhance end-user applications. - - **Project Summary** - - This project aims to replicate the characterisation study from "SPEC CPU2017: Performance, Event, and Energy Characterization on the Core i7-8700K" on an Arm64 platform (e.g., Ampere Altra, AWS Graviton) using different compilers and performance profiling tools. The study will analyze how compiler optimizations and architectural features affect execution time, energy efficiency, and instruction throughput on Arm-based server processors. 
Deliverables include a comprehensive performance analysis report, reproducible benchmarking scripts, and a dataset comparing performance across different configurations. The report should locate microarchitectural bottlenecks using the [top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology), compiler performance and recommendations on how to improve performance. - - ## Prequisites - - Hardware: Access to Arm64-based server (Ampere Altra, AWS Graviton, Raspberry Pi for preliminary tests) - - Software: Familiarity with performance engineering and a OOP with a language such as C++. - - Compilers: GCC, LLVM/Clang, Arm Compiler for Linux - - Profiling Tools: perf, Arm Performance Libraries - - Workloads: SPEC CPU2017 (academic license required), custom workloads - - ## Resources from Arm and our partners - - - Research Article: [Characterisation Paper on x86](https://research.spec.org/icpe_proceedings/2019/proceedings/p111.pdf) - - - Whitepaper: [Arm Top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology) - - - Install Guide:[Install Perf for Linux on Arm](https://learn.arm.com/install-guides/perf/) - - - Documentation: [Arm Performance Counters](https://developer.arm.com/documentation/ddi0379/a/Introduction/Performance-counters) - - - Documentation: [SPEC CPU2017 ](https://www.spec.org/cpu2017/results/) - - - Documentation: [GNU compilers](https://gcc.gnu.org/) - - - Software Download: [Arm compiler for Linux](https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). 
- - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- ### Description **Why this is important?** diff --git a/docs/_posts/2025-05-30-Write-A-Learning-Path.md b/docs/_posts/2025-05-30-Write-A-Learning-Path.md index b372f0b5..c70c695a 100644 --- a/docs/_posts/2025-05-30-Write-A-Learning-Path.md +++ b/docs/_posts/2025-05-30-Write-A-Learning-Path.md @@ -1,59 +1,3 @@ ---- -title: Write-A-Learning-Path -description: This project lets students turn their Arm expertise into a publish-ready Learning Path—creating a structured, hands-on tutorial that guides others through a complete, hardware-friendly build and showcases the author’s teaching skills. -subjects: -- Libraries -- Web -requires-team: -- No -platform: -- Servers and Cloud Computing -- Laptops and Desktops -- Mobile, Graphics, and Gaming -- Automotive -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-05-30 -license: -status: -- Hidden -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - This exciting opportunity invites students to create and submit a comprehensive learning path focused on any aspect of Arm technology. Participants will have the chance to showcase their expertise, contribute to the academic community, and potentially publish their work for a wider audience. Learning paths are structured tutorials that guide learners through a series of topics and skills. 
For example, a project could involve developing a learning path on Arm-based embedded systems, culminating in a practical demonstration of a working prototype, such as a simple temperature monitoring system using a basic microcontroller and a few sensors. This project can be easily recreated with minimal hardware and software access. We look forward to seeing your innovative and insightful submissions! - - ## Prequisites - - - Computer with Internet Connectivity - - ## Resources from Arm and our partners - - - Documentation: [How to create a learning path](https://learn.arm.com/learning-paths/cross-platform/_example-learning-path/) - - Documentation: (https://github.com/ArmDeveloperEcosystem/arm-learning-paths/discussions/categories/ideas-for-new-learning-paths) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---- diff --git a/docs/_posts/2025-05-30-projects.md b/docs/_posts/2025-05-30-projects.md index fede046c..aa515602 100644 --- a/docs/_posts/2025-05-30-projects.md +++ b/docs/_posts/2025-05-30-projects.md @@ -1,13 +1,3 @@ ---- -title: projects -filter: project -publication-date: 2025-05-30 -layout: article -full_description: |- - **Arm Developer Labs** is a repository of software and hardware project suggestions sourced from internal Arm teams and our network of eco-system partners but framed in way to be immediately accessible and useful to both academics and professional software developers alike. - - Filter or keyword search below. You can take a project as is, adapt it to your circumstances or simply take inspiration from it. Each project link shows what contextual collateral and resources are available. To get benefits from Arm, you need to tell us about your work. Read more on the [homepage](https://arm-university.github.io/Arm-Developer-Labs/). ---- **Arm Developer Labs** is a repository of software and hardware project suggestions sourced from internal Arm teams and our network of eco-system partners but framed in way to be immediately accessible and useful to both academics and professional software developers alike. Filter or keyword search below. You can take a project as is, adapt it to your circumstances or simply take inspiration from it. Each project link shows what contextual collateral and resources are available. To get benefits from Arm, you need to tell us about your work. Read more on the [homepage](https://arm-university.github.io/Arm-Developer-Labs/). 
\ No newline at end of file diff --git a/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md b/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md index e9988d3a..bfed1a05 100644 --- a/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md +++ b/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md @@ -1,67 +1,3 @@ ---- -title: C-Based-Application-from-Scratch -description: This self-service project goes back to the fundamentals. The challenge is to develop an application of your choice but your are only permitted to use the C language with as few dependencies as possible. -subjects: -- Performance and Architecture -- Libraries -requires-team: -- No -platform: -- IoT -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-07-11 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - **Why this is important?** - - Modern, higher-level managed languages such as Java and Python enabling developers to stand up complex applications quickly. However, this often comes at the expense of configurability and performance. Developing low-level skills are valuable to unlocking performance but also are crucial to understanding the principle mechanisms in computer architecture and how to write programs that leverage available hardware features. At Arm, we have a history of developing low-level software components, such as compilers, as well as contributing to software projects, such as the Linux Kernel. The demand for developers with these skills is high. - - **Project Summary** - - This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files. 
Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count. - - Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This is excluding those provided by the C standard library. You are also free to use any suitable compiler. If you use a difference language or a dependency written in another language, please include a short justification in your submission. - - Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions. - - - ## Prerequisites - - - Access to a Raspberry Pi device (any generation) - - Intermediate Understanding of the C language - - - ## Resources from Arm and our partners - - - External Resource: [Getting started with your Raspberry Pi](https://www.raspberrypi.com/documentation/computers/getting-started.html). - - External Resource: [C language documentation](https://en.cppreference.com/w/c/language.html) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. - - ### Previous Submissions - 1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git). - 2. 
[VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game). ---- ## Description **Why this is important?** diff --git a/docs/_posts/2025-08-28-NPC-LLM-Runtime.md b/docs/_posts/2025-08-28-NPC-LLM-Runtime.md index b2124229..e0dc8cdb 100644 --- a/docs/_posts/2025-08-28-NPC-LLM-Runtime.md +++ b/docs/_posts/2025-08-28-NPC-LLM-Runtime.md @@ -1,78 +1,3 @@ ---- -title: NPC-LLM-Runtime -description: This self-service project explores novel ways of integrating Large Language Models (LLMs) into real-time gameplay to drive dynamic Non-Playable Character (NPC) interactions. -subjects: -- ML -- Gaming -- Graphics -requires-team: -- No -platform: -- AI -- Mobile, Graphics, and Gaming -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-08-28 -license: -status: -- Published -badges: trending -donation: -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - **Why is this important?** - - Large Language Models (LLMs) are redefining the possibilities for interactive game experiences, especially in how players engage with Non-Playable Characters (NPCs). Traditional NPCs rely on pre-scripted dialogue trees and behavior systems, which limit immersion and adaptability. By leveraging LLMs locally, we can move beyond static interactions and create dynamic, real-time NPC behaviors that respond intelligently to players. This not only enhances immersion but also demonstrates the feasibility of running advanced AI directly on-device without reliance on cloud infrastructure. Such innovation could shape the future of gaming, particularly in mobile platforms where performance and autonomy are critical. - - **Project Summary** - - This project challenges you to propose and implement novel methods of using LLMs to control or interact with NPCs during runtime. Beyond dialogue systems, the focus is on new, creative mechanisms that showcase the potential of LLM-driven NPCs in real-time gameplay. 
Your implementation should demonstrate a working prototype running locally at a stable 30–60 FPS, ensuring both responsiveness and playability. While achieving mobile deployment is the ultimate goal, a PC/laptop demo will also be accepted as proof of concept. The final submission should include source code, reproducible build instructions, supporting documentation, and a short demo video. - - To qualify, your submission should include, where possible: - - - Source code (with clear documentation and build instructions) - - A reproducible setup (e.g. scripts, datasets, or dependencies) - - A supporting document describing the project and design decisions - - High-quality images and a video (≤ 3 minutes) demonstrating the demo in action - - Please ensure your contribution contains no confidential material and that you have rights to share it, especially if affiliated with an academic or commercial institution. - - ## Prequisites - - Familiarity with a modern game engine (e.g., Unity, Unreal Engine, Godot) - - Experience with integrating machine learning models into real-time applications - - Knowledge of C++, Python, or a game scripting language - - Understanding of on-device ML optimization (e.g., quantization, pruning, mobile deployment) - - Access to hardware capable of running LLM inference locally (PC or mobile) - - - ## Resources from Arm and our partners - - - Blog: [The Future of Interaction with Mobile Game Characters](https://dl.acm.org/doi/10.1145/3641234.3671019) - - Blog: [The Future is Here: Verbal Interaction with NPCs on Mobile](https://doi.org/10.1145/3664294.3664365) - - Blog: [Google AI for game developers- Google Developers Blog](https://developers.googleblog.com/en/google-ai-for-game-developers/) - - Webinar: [Generative AI in Game Development](https://meet95924766.adobeconnect.com/pkevz158x6tp/) - - Paper: [A Survey on Large Language Model-Based Game Agents ](https://arxiv.org/abs/2404.02039) - - Paper: [Large Language Models and Games: A Survey 
and Roadmap](https://arxiv.org/abs/2402.18659) - - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- ## Description **Why is this important?** diff --git a/docs/_posts/2025-11-03-Python-Porting-Challenge.md b/docs/_posts/2025-11-03-Python-Porting-Challenge.md index 238998f6..7ea05003 100644 --- a/docs/_posts/2025-11-03-Python-Porting-Challenge.md +++ b/docs/_posts/2025-11-03-Python-Porting-Challenge.md @@ -1,74 +1,3 @@ ---- -title: Python-Porting-Challenge -description: This challenge focuses on enabling Python support for Windows on Arm (WoA) to improve developer experience. While Python is widely used in research and industry, many popular packages—such as Pandas—still lack pre-built WoA binaries (win_arm64 wheels). The goal is to validate and optimise third-party packages, fix compatibility issues, and collaborate with maintainers to upstream WoA support. 
-subjects: -- Libraries -requires-team: -- No -platform: -- Laptops and Desktops -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-03 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility* - - ## Description - - Windows on Arm brings the power of Arm architecture to Windows users, delivering energy efficiency and performance for a wide range of devices. As adoption grows, ensuring a seamless developer experience is critical, especially for Python, one of the most widely used languages in research, education, and industry. - - This challenge is on advancing the Arm ecosystem by addressing critical gaps in software enablement. A key objective is enabling Windows on Arm (WoA) across the WoA Python package ecosystem, which involves validating and optimising 3rd party packages to ensure seamless functionality across diverse environments. - - Despite Python’s widespread adoption, many popular packages such as Pandas still lack pre-built Windows on Arm binaries (`win_arm64` wheels), creating a barrier for developers and researchers who rely on these tools for data analysis and scientific computing. If a Windows on Arm Python developer wanted to use Pandas, they would have to recompile from source, which required the correct toolchain and is not guaranteed to compile or run successfully. - - Key Objectives: - - - Python Ecosystem Enablement: Turn at least 5 amber projects green on [Windows Arm64 Wheels](https://tonybaloney.github.io/windows-arm64-wheels/). - - Identify packages that do not have readily available `win_arm64` wheels. 
Identify any bugs or regressions when porting to application (for example, `x86` instrinsics) and create a patch that resolves issues and enables the packages to correctly build and run performantly. - - Community Collaboration: Engage with global developer communities, such as Python package maintainers, to get WoA package support upstreamed and integrated. - - - ## Prequisites - - - Intermediate to advance understanding of the Python language - - Some experience on creating python packages and continuous integration testing. - - If you decide to tackle non pure-python packages that are written in other languages, you will need an intermediate understanding of the language the program was written in (e.g., Rust, Java, C++ etc.). - - ## Resources from Arm and our partners - - - External Documentation: [Methods to run Windows on Arm64](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) - - External Documentation: [Python Packages without Windows-on-Arm Wheels](https://tonybaloney.github.io/windows-arm64-wheels/) - - External Documentation: [Python Packages without Windows-on-Arm Wheels additional resource](http://www.winarm64wheels.com/) - - Learning Path: [Sampling CPython with WindowsPerf](https://learn.arm.com/learning-paths/laptops-and-desktops/windowsperf_sampling_cpython/) - - Community Post: [Python on Windows Arm64](https://discuss.python.org/t/python-on-windows-arm64/104524) - - External Documentation: [Status of Python versions](https://devguide.python.org/versions/) - - GitHub Repository: [Source code and documentation to build Python Wheels](https://github.com/pypa/cibuildwheel) - - - ## Support Level - - If you would like to request a small donation to help procure Windows on Arm hardware, please reach out to us at Arm-Developer-Lab@arm.com. - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- ### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility* diff --git a/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md b/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md index 27efe0e4..8ec39bef 100644 --- a/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md +++ b/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md @@ -1,86 +1,3 @@ ---- -title: Always-On-AI-with-Ethos-U85-NPU -description: The vision of Edge AI compute is to embed low-power intelligent sensing, perception, and decision systems everywhere. A low-power always-on-AI island continuously monitors sensory inputs to detect triggers. When a trigger is detected, it wakes up a more capable processor to carry out high-value inference, interaction, or control tasks. 
-subjects: -- ML -- Performance and Architecture -- Embedded Linux -- RTOS Fundamentals -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - - ## Description - - **Why is this important?** - - The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy: - - A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies. - - When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance. - - This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources. - - **Project Summary** - - Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements: - - 1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers. - 2. 
A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference. - 3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring). - - *Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. For LLM inference, consider connecting a Raspberry Pi 5 or similar.* - - Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices. - - You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved. - - Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve? - - ## What will you use? - You should either be familiar with, or willing to learn about, the following: - - Programming: Python, C++, Embedded C - - ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA. - - Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. 
Yocto) or Raspberry Pi OS - - - ## Resources from Arm and our partners - - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) - - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) - - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- diff --git a/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md b/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md index 458a8f1c..e87e5142 100644 --- a/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md +++ b/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md @@ -1,82 +1,3 @@ ---- -title: Edge-AI-On-Mobile -description: Leverage the latest SME2 (Scalable Matrix Extension 2) available on the newest vivo X300 smartphones (built on Arm Lumex CSS) for advanced image/video, audio and text processing edge AI. 
Explore how SME2, via KleidiAI, enables larger matrix workloads, higher throughput, and novel applications on device without cloud connectivity required. -subjects: -- ML -- Performance and Architecture -- Libraries -requires-team: -- No -platform: -- Mobile, Graphics, and Gaming -- AI -- IoT -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -badges: trending -layout: article -sidebar: - nav: projects -full_description: |- - ## Description - - ### Why is this important? - - SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device. - - [SME2](https://www.arm.com/technologies/sme2) - - The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases. - - [vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones) - - ### Project Summary - - Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2. 
- - Example project areas: - - Real-time video semantic segmentation (e.g., background removal + AR compositing) - - Live object detection + natural-language description (text summary of what the camera sees) - - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition - - On-device lightweight LLM or encoder-only transformer processing for mobile assistants - - Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case. - - Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2. 
- - --- - - ## Resources from Arm and our partners - - - Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile) - - Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/) - - Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/) - - Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/) - - Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm) - - --- - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
- - --- ---- ## Description ### Why is this important? diff --git a/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md b/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md index a620c800..ce94ec3b 100644 --- a/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md +++ b/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md @@ -1,124 +1,3 @@ ---- -title: Ethos-U85-NPU-Applications -description: Push the limits of Edge AI by deploying the heaviest inference applications possible on Ethos-U85. Students will explore transformer-based and TOSA-optimized workloads that demonstrate performance levels on the next-gen of Ethos NPUs. -subjects: -- ML -- Performance and Architecture -requires-team: -- No -platform: -- IoT -- Embedded and Microcontrollers -- AI -sw-hw: -- Software -- Hardware -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -badges: trending -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Description - - **Why is this important?** - - The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices. - - This project challenges you to explore the boundaries of what’s possible on Ethos-U85. The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85. 
- - [Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85) - - **Project Summary** - - Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities. - - Your project should include: - - 1. Model Deployment and Optimization - Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using: - - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance. - - The Vela compiler for optimization. - - These tools can be used to: - - Convert and visualize model graphs in TOSA format. - - Identify unsupported operators. - - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting. - - Run Vela for optimized compilation targeting Ethos-U85. - - 2. Application Demonstration - Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include: - - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification). - - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks. - - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding. - - 3. Analysis and Benchmarking - Report quantitative results on: - - Inference latency, throughput (FPS or tokens/s), and memory footprint. - - Power efficiency under load (optional). - - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits). - - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion. 
- - --- - - ## What kind of projects should you target? - - To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria: - - - Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models. - - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms. - - High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput. - - *Example:* 512×512 semantic segmentation or multi-object detection. - - Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance. - - *Example:* large MLP heads or transformer token mixers. - - Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations. - - The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies. - - --- - - ## What will you use? - You should be familiar with, or willing to learn about: - - Programming: Python, C/C++ - - ExecuTorch or TensorFlow Lite (Micro/LiteRT) - - Techniques for optimising AI models for the edge (quantization, pruning, etc.) 
- - Optimization Tools: - - TOSA Model Explorer - - .tflite to .tosa converter (if using Tensorflow rather than ExecuTorch) - - Vela compiler for Ethos-U - - Bare-metal or RTOS (e.g., Zephyr) - - --- - - ## Resources from Arm and our partners - - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) - - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) - - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) - - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) - - Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model) - - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) - --- - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. - - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
---- ## Description diff --git a/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md b/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md index 58bcca09..8a3ddc2d 100644 --- a/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md +++ b/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md @@ -1,97 +1,3 @@ ---- -title: Game-Dev-Using-Neural-Graphics-&-Unreal-Engine -description: Build a playable Unreal Engine 5 game demo that utilises Arm’s Neural Graphics SDK UE plugin for features such as Neural Super Sampling (NSS). Showcase near-identical image quality at lower resolution by driving neural rendering directly in the graphics pipeline. -subjects: -- ML -- Gaming -- Libraries -- Graphics -requires-team: -- No -platform: -- Mobile, Graphics, and Gaming -- Laptops and Desktops -- AI -sw-hw: -- Software -support-level: -- Self-Service -- Arm Ambassador Support -publication-date: 2025-11-27 -license: -status: -- Published -badges: trending -donation: -layout: article -sidebar: - nav: projects -full_description: |- - - - ## Description - - ### Why is this important? - - Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation. - - Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face. - - [Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology) - - Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). 
It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process. - - Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released. - - ### Project Summary - - Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate: - - **Near-identical visuals at lower resolution** (render low → upscale with NSS) - - Document your progress and findings and consider alternative applications of the neural technology within games development. - - Attempt different environments and objects. For example: - - - Daytime vs night - - Urban city, jungle forest, ocean floor, alien planet, building interiors - - Complex lighting and shadows - - NPCs with detailed clothing, faces, hair. Include animations. - - Make your scenes dynamic with particle effects, shadows, physics and motion. - - --- - - ## Pre-requisites - - Laptop/PC/Mobile for Android Unreal Engine game development - - Willingness to learn about games development and graphics, and the increasing use of AI in these fields. 
- - --- - - ## Resources from Arm and partners - - Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling) - - Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works) - - Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics) - - Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/) - - Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/) - - Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/) - - Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine) - - Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines) - - Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym) - - Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/) - - --- - - ## Support Level - - This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). - - ## Benefits - - Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
- - - To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- ## Description From 55dbd6ed6c0e822195de3f0adebf342cbe42de52 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 11:58:58 +0000 Subject: [PATCH 91/98] Add files via upload --- docs/_posts/2025-05-30-AI-Agents.md | 71 ++++++++ .../2025-05-30-AI-Powered-Porting-Tool.md | 77 +++++++++ .../2025-05-30-AMBA-Simulator-Framework.md | 63 +++++++ .../2025-05-30-Academic-Trends-Dashboard.md | 64 +++++++ ...25-05-30-Architecture-Insight-Dashboard.md | 73 ++++++++ .../2025-05-30-Arduino-IDE-Windows-on-Arm.md | 78 +++++++++ ...5-05-30-Bioinformatic-Pipeline-Analysis.md | 77 +++++++++ ...30-Compliance-Ready-Smart-Camera-System.md | 66 ++++++++ .../2025-05-30-FPGA-Accellerator-with-DDR.md | 52 ++++++ docs/_posts/2025-05-30-HPC-Algorithm.md | 62 +++++++ ...5-05-30-Haskell-Compiler-Windows-on-Arm.md | 77 +++++++++ .../2025-05-30-Human-Centric-Robotics.md | 73 ++++++++ .../2025-05-30-LLM-Benchmark-on-Arm-Server.md | 54 ++++++ ...-05-30-Machine-Learning-on-AWS-Graviton.md | 70 ++++++++ ...-05-30-Processor-in-the-Loop-Automotive.md | 76 +++++++++ .../2025-05-30-Quantisation-Aware-Training.md | 68 ++++++++ .../2025-05-30-R-Arm-Community-Support.md | 84 +++++++++ ...25-05-30-Real-Time-Image-Classification.md | 64 +++++++ ...05-30-Responsible-AI-and-Yellow-Teaming.md | 86 ++++++++++ ...2025-05-30-Sentiment-Analysis-Dashboard.md | 58 +++++++ .../2025-05-30-Smart-Voice-Assistant.md | 61 +++++++ ...05-30-SpecINT2017-benchmarking-on-Arm64.md | 78 +++++++++ .../2025-05-30-Write-A-Learning-Path.md | 57 +++++++ docs/_posts/2025-05-30-projects.md | 11 ++ ...-07-11-C-Based-Application-from-Scratch.md | 65 +++++++ docs/_posts/2025-08-28-NPC-LLM-Runtime.md 
| 76 +++++++++ .../2025-11-03-Python-Porting-Challenge.md | 72 ++++++++ ...5-11-27-Always-On-AI-with-Ethos-U85-NPU.md | 84 +++++++++ docs/_posts/2025-11-27-Edge-AI-On-Mobile.md | 80 +++++++++ .../2025-11-27-Ethos-U85-NPU-Applications.md | 122 ++++++++++++++ ...v-Using-Neural-Graphics-&-Unreal-Engine.md | 159 ++++++++++++++++++ 31 files changed, 2258 insertions(+) create mode 100644 docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md diff --git a/docs/_posts/2025-05-30-AI-Agents.md b/docs/_posts/2025-05-30-AI-Agents.md index ae38df4c..bfd647c5 100644 --- a/docs/_posts/2025-05-30-AI-Agents.md +++ b/docs/_posts/2025-05-30-AI-Agents.md @@ -1,3 +1,74 @@ +--- +title: AI-Agents +description: This self-service project builds a sandboxed AI agent on Arm hardware that harnesses appropriately sized LLMs to safely automate complex workflows—from DevOps pipelines to e-commerce tasks—demonstrating secure, efficient automation on accessible Arm platforms. +subjects: +- ML +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +badges: trending +layout: article +sidebar: + nav: projects +full_description: |- + ### Description + + **Why this is important?** + + AI Agents enhance large language models (LLMs) by performing user-driven actions, enabling various commercial applications. This is a nascent domain with emerging frameworks such as the model context protocol (MCP) leading to commercial products and services. The Arm architecture, from microcontrollers to servers, will be used to carry out agentic functions and Arm has many initiatives to support the AI future. See [our website for more details](https://www.arm.com/markets/artificial-intelligence). 
+ + **Project Summary** + + Participants must develop an AI-powered agent that automates repetitive and complex workflow tasks in a specific domain, such as software development, e-commerce, or DevOps. The foundational model can be a suitable model of your choice (e.g., [OpenAI API](https://openai.com/api/)) but you must consider the appropriate model for cost, reliability and accessibility. Additionally, you are free to choose the tools for agent functionality, such as [LLama-cpp-agent](https://github.com/Maximilian-Winter/llama-cpp-agent). One stipulation is that the LLM and/or agent must run on an Arm-based system, such as a Google Pixel phone or Arm-based server. + + The AI agent will be deployed in a sandboxed environment to ensure safety and prevent unintended consequences, including prompt guardrails. + + ## Prerequisites + + - Intermediate understanding of an OOP language such as Python (for front-end, if needed). + - Familiarity with databases such as PostgreSQL, MongoDB, VectorDB. + - Access to an LLM (e.g., through an API or on-device LLM) + - Optional API access to target workflow tools such as Jira, Jenkins etc. + + + ## Resources from Arm and our partners + + - Learning path: [Deploy an MCP Server on a Raspberry Pi5 for AI Agent Interaction](https://learn.arm.com/learning-paths/cross-platform/mcp-ai-agent/) + + - Learning path: [Deploy an AI Agent on Arm with llama.cpp and llama-cpp-agent](https://learn.arm.com/learning-paths/servers-and-cloud-computing/ai-agent-on-cpu/) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + + ### Previous Submissions + 1. [AI to Solve Maths Example Sheets at University of Cambridge. (Finley Stirk, Eliyahu Gluschove-Koppel and Ronak De)](https://github.com/egkoppel/example-papers) + + 2. [AI that interprets user requests, generates circuit descriptions, creates LTSpice ASC code, and iteratively refines circuit designs using a combination of GPT-based language models, a vision analysis module, and LTSpice simulation. (Gijeong Lee, Bill Leoutsakos)](https://github.com/BillLeoutsakosvl346/ElectroNinjaRefined) + + + 3. [AI agent to track real-time student engagement and exam performance (Jasper Wang, Sritej Tummuru, Talha Javed)](https://github.com/JasperWANG-911/AI_Agent) +--- + ### Description **Why this is important?** diff --git a/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md b/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md index 0b3061ac..33384db1 100644 --- a/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md +++ b/docs/_posts/2025-05-30-AI-Powered-Porting-Tool.md @@ -1,3 +1,80 @@ +--- +title: AI-Powered-Porting-Tool +description: This self-service project creates an AI-driven porting engine that analyzes package dependencies, auto-generates fixes, and submits pull requests—accelerating native macOS and Windows-on-Arm support for bioinformatics and R software so researchers can run demanding workflows directly on modern Arm devices. 
+subjects: +- CI-CD +- ML +- Migration to Arm +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- Mobile, Graphics, and Gaming +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + **Why this is important?** + + Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). While many leading Bioconda packages now support Linux/Arm, there remains a gap in native macOS and Windows on Arm support, as numerous packages default to emulated x86 environments. Additionally, the R community faces challenges with Windows-on-Arm support for community-created packages, with many unable to build due to x86-specific code issues. + + **Project Summary** + + This project challenges you to build an intelligent automation tool for porting software packages — for use in domains such as [bioinformatic pipelines with Nextflow](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/Bioinformatic-Pipeline-Analysis.md) or [statistics with R](https://github.com/arm-university/Arm-Developer-Labs/blob/main/Projects/Projects/R-Arm-Community-Support.md). 
+ + Given the large number of community packages, applying manual patches is not only time-consuming but also inefficient, as many involve similar, repetitive adjustments—highlighting the need for a scalable, automated solution. + The goal is to build a sophisticated system (beyond simple shell scripts) that uses dependency graph analysis and machine learning to: + + - Identify unported packages + - Trace recursive dependency issues + - Recommend or auto-generate build recipes and steps + - Evaluate build success and reattempt intelligently + - Generate pull requests when confident of a fix. + - For complex packages, offer guidance to developers on how to port them—for example, by suggesting tools like SSE2NEON for translating x86 SSE intrinsics. + - Be extensible to work with various packaging systems and languages + + This project is a blend of automation, machine learning, and systems programming. The outcome could directly contribute to open source ecosystems and help bring cutting-edge bioinformatics tools to wider hardware audiences. + + ## Prerequisites + + - Access to an Apple Silicon or Windows on Arm machine. + - Familiarity with Python, Bash and Nextflow + - Familiarity with genomics/bioinformatics or statistics with the R language. + - Experience or willing to learn nf-core pipelines, Conda, BioConda and Docker/Singularity. 
+ + + ## Resources from Arm and our partners + + - External Resource: [Example Porting Script for Bioconda](https://github.com/dslarm/bioconda-contrib-notes/tree/main), [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) and [Bioconda package repository](https://bioconda.github.io/) + - Documentation: [nf-core documentation](https://nf-co.re/docs/) + - External Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/), Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages + - Dataset: Example [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+--- + ## Description **Why this is important?** diff --git a/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md b/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md index 2c4b9b9b..0fee2bb1 100644 --- a/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md +++ b/docs/_posts/2025-05-30-AMBA-Simulator-Framework.md @@ -1,3 +1,66 @@ +--- +title: AMBA-Simulator-Framework +description: This self-guided hardware project has you implement, simulate, and FPGA-prototype a Verilog AMBA bus—from simple APB to advanced CHI—sharpening hands-on expertise with Arm’s interconnect backbone and yielding a reusable reference design for future embedded systems. +subjects: +- Virtual Hardware +- Performance and Architecture +requires-team: +- No +platform: +- Embedded and Microcontrollers +sw-hw: +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + ## Audience + Electronic Engineering + + ## Description + This project aims to develop a reference design of AMBA (Advanced Microcontroller Bus Architecture) infrastructure. You are free to choose which AMBA protocol and version of the interconnect standard to implement with more simple specifications e.g., APB, being easier to create than coherent protocols such as AMBA CHI. + + The main deliverables include the Verilog design of the AMBA infrastructure, a Verilog test bench for testing the design, an RTL (Register Transfer Level) simulation flow to verify the functionality, and an FPGA prototyping platform to demonstrate the design in a real-world environment. The project will provide a comprehensive understanding of AMBA protocols and their implementation, making it an excellent learning opportunity for students interested in digital design and hardware description languages. 
+ + ## Prerequisites + + - Intermediate understanding of Verilog, SystemVerilog or other hardware description languages (HDL). + - Access and basic understanding of ModelSim, Quartus and Vivado + - Access to a suitable FPGA development board (e.g., Xilinx or Altera), simulation tools + + ## Resources from Arm and our partners + + + - Video: [Introductory Video to AMBA](https://www.youtube.com/watch?v=zayyWwSxyW4) + - Documentation: [AMBA Interconnect Specifications](https://www.arm.com/architecture/system-architectures/amba/amba-specifications) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Previous Submissions + + Similar projects: + - https://github.com/kumarraj5364/AMBA-APB-PROTOCOL + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+--- + ## Audience diff --git a/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md b/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md index 1b4f0c1b..e44ac364 100644 --- a/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md +++ b/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md @@ -1,3 +1,67 @@ +--- +title: Academic-Trends-Dashboard +description: This self-service project creates a web-scraping, database-driven dashboard that visualizes how computer-science research topics shift over time—helping Arm partners and chip architects align future hardware designs with emerging algorithmic trends. +subjects: +- Web +- Databases +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- Mobile, Graphics, and Gaming +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + ## Description + + **Why this is important?** + + The field of computer science research is continually evolving, with new algorithms shaping the design of future hardware. This project aims to guide computer architecture decisions, ensuring that upcoming hardware aligns with the needs of software algorithms and applications. The dashboard you create can be of use to Arm partners manufacturing physical chips and used to guide architecture decisions. + + **Project Summary** + + The main deliverable is a web scraping tool that pulls keywords from academic papers, considering the popularity of the paper and its publication. The data will be stored in an appropriate database and displayed in a web browser format, allowing users to visualize trends and changes in research focus over time. This project will provide practical experience in using APIs, web scraping, and data analysis. 
Some academic search engines to consider are Google Scholar, [BASE](https://www.base-search.net/), [Core](https://core.ac.uk/) and [Science.gov](https://science.gov/). + + + ## Prerequisites + + - Software: Intermediate understanding of a scripting programming language (e.g., Python, JavaScript), web development and statistics. + - Hardware: Access to a computer with internet connectivity + - API access to scrape specific journal websites, you may need to obtain explicit permission from the website administrators or owners. + + ## Resources from Arm and our partners + + - Learning path: [Deploy MariaDB on Arm servers](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/) + - Learning path: [Learn how to deploy PostgreSQL](https://learn.arm.com/learning-paths/servers-and-cloud-computing/postgresql/) + - Software Libraries: Example libraries for web scraping are [BeautifulSoup](https://pypi.org/project/beautifulsoup4/), Selenium. + + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+--- + ## Description diff --git a/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md b/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md index cf286c9c..b49d3df7 100644 --- a/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md +++ b/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md @@ -1,3 +1,76 @@ +--- +title: Architecture-Insight-Dashboard +description: This self-service project develops a data-rich dashboard that visualizes the popularity of Arm CPU/OS combinations and pinpoints software-stack support for specific extensions—giving developers an instant, validated view of where their workloads will run best. +subjects: +- Performance and Architecture +- Web +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- Mobile, Graphics, and Gaming +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ### Description + + **Why this is important?** + + Developers often face challenges in selecting the appropriate platform for their software. With numerous smartphones and cloud instances available, gauging consumer popularity and availability can be difficult, and identifying software stack dependencies can be time-consuming. As Arm anticipates an increase in Arm-based products in the coming years, this situation is likely to become even more complex, requiring the need for a single, validated solution. + + **Project Summary** + + This project aims to develop a comprehensive dashboard that lets a developer know what proportion of devices support a specific Arm CPU extension, similar to [“Can I use”](https://caniuse.com/) for web development and any software compatibility issues. 
The functional requirements for the Architecture Insights dashboard: + + - Popularity of Arm architectures and Operating System combinations over time + - Searchable index of software, libraries and tools that have been optimised for a specific architecture. For example, "Does the video processing software, FFMPEG, support acceleration for SVE2 with Windows 11?" + + + Students will gain hands-on experience with data visualization, statistical analysis, web development, and market analysis, providing valuable insights into the Arm ecosystem. + + ## Prerequisites + + You are free to explore your own implementation. The skills below are examples. + + - Intermediate understanding of an OOP language such as Python or JavaScript + - Access to a computer with internet connectivity + + + ## Resources from Arm and our partners + + - Website: [Arm Software Ecosystem Dashboard](https://www.arm.com/developer-hub/ecosystem-dashboard) + - Website: [Windows on Arm Support Wiki page](https://linaro.atlassian.net/wiki/spaces/WOAR/overview) + - Website: ["Can I Use?" dashboard](https://caniuse.com/) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+--- + diff --git a/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md b/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md index e76528c0..d1ef1704 100644 --- a/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md +++ b/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md @@ -1,3 +1,81 @@ +--- +title: Arduino-IDE-Windows-on-Arm +description: This self-service project ports and optimizes the Arduino IDE—patching its lzma-native dependency—to run natively and efficiently on Windows on Arm, giving developers hands-on experience with cross-platform builds, Arm64 performance tuning, and upstream open-source contributions. +subjects: +- Performance and Architecture +- Migration to Arm +- Libraries +requires-team: +- No +platform: +- Laptops and Desktops +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +badges: +- trending +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + + **Why this is important?** + + Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, consumers will expect both performance and battery efficiency across all WoA applications, such as the ArduinoIDE. + + **Project summary** + + This project focuses on **porting and optimising the Arduino IDE**—an essential open-source platform for embedded development to run natively and efficiently on Windows on Arm platforms. 
In addition, the project tackles a key dependency, `lzma-native`, a compression library used by the IDE, which currently [**lacks support for Windows on Arm**](https://github.com/addaleax/lzma-native/issues/132). Previous attempts to build `lzma-native` on WoA failed due to architecture-specific compilation issues and native module bindings (`node-gyp`, `liblzma`, etc.). + + ### Key Objectives: + - Successfully build and run the [Arduino IDE](https://github.com/arduino/arduino-ide) on Windows on Arm. + - Patch or fork [`lzma-native`](https://github.com/addaleax/lzma-native) to enable full compatibility on WoA. + - Benchmark IDE performance and memory usage on Arm64 vs. x64 emulation. + - Submit upstream patches and document issues to support long-term ecosystem health. + + This project aligns strongly with Arm’s mission to expand native software compatibility on Arm-based Windows devices. It provides students with a **deep dive into cross-platform development, native module compilation, and Arm architecture optimization**, making it ideal for CV building, community contribution, and real-world system-level experience. + + ## Prerequisites + + + - Familiarity with JavaScript (Node.js), TypeScript and C++ (lzma-native) + - Familiarity with, or willingness to learn, `CMake`, `Ninja`, `Visual Studio with C++ Desktop Dev`, UTM + - Basic understanding of terminal programs such as `Windows Terminal`, `PowerShell` and `WSL2` + - Access to a physical Windows on Arm device or a [WoA Virtual Machine running through UTM](https://mac.getutm.app/gallery/windows-11-arm). See the [Linaro Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) for more information.
+ + + ## Resources from Arm and our partners + + - Repository: [Arduino IDE GitHub repo](https://github.com/arduino/arduino-ide) + - Repository: [lzma-native GitHub repo](https://github.com/addaleax/lzma-native) + - External Documentation: [Issue #132 – lzma-native Windows Arm64 build failure](https://github.com/addaleax/lzma-native/issues/132) + - Documentation: Arm’s official [Learn on Arm](https://learn.arm.com/) platform + - External Documentation: [Windows on Arm Environments – Linaro wiki](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) + - Documentation: [Node.js native addon guides](https://nodejs.org/api/addons.html) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+--- + diff --git a/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md b/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md index 7f1c8da0..147a764d 100644 --- a/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md +++ b/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md @@ -1,3 +1,80 @@ +--- +title: Bioinformatic-Pipeline-Analysis +description: This self-service project benchmarks Arm64 Bioconda packages in real nf-core workflows—measuring performance, diagnosing build failures, and proposing fixes that accelerate truly native bioinformatics on the expanding fleet of Arm-powered machines. +subjects: +- Performance and Architecture +- Databases +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ### Description + + **Why this is important?** + + Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops running Linux and MacOS. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). While many leading Bioconda packages now support Linux/Arm, there still emulated components that can be the bottleneck. + + **Project summary** + + This project aims to benchmark specific Bioconda packages that have been built for Arm64 using the nf-core-arm-discovery repository. 
The participant will utilize public genomic datasets from databases such as NCBI, select appropriate datasets, and execute bioinformatics workflows on Arm-based infrastructure. The candidate will evaluate the performance, compatibility, and efficiency of these packages, document errors and failures, and investigate the reasons behind package build failures. The final deliverable will be a detailed report with performance metrics, identified issues, and recommended improvements to enhance package support on Arm64. + + The deliverables of the project are as follows: + + - Selection and justification of public genomic datasets. + - Execution of bioinformatics workflows using Bioconda packages on Arm64. + - Performance benchmarking and comparison with x86 architectures. + - Documentation of failed package builds and proposed fixes. + - Comprehensive report with results, analysis, and recommendations. + + + ## Prerequisites + + - Intermediate understanding of Python, Bash and Nextflow + - Basic experience with nf-core pipelines, Conda, Docker/Singularity, Snakemake + - Access to Arm64-based cloud instances (e.g., AWS Graviton) with plenty of memory and storage + - IP access to Public genomic databases (NCBI, ENA, etc.) + + ## Resources from Arm and our partners + + - External Documentation: [nf-core documentation](https://nf-co.re/docs/) + + - External Documentation: [AWS Graviton documentation](https://aws.amazon.com/ec2/graviton/) + + - Repository: [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main) + + - Repository: [Bioconda package repository](https://bioconda.github.io/) + + - Dataset: [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/) + + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program.
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + ### Description **Why this is important?** diff --git a/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md b/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md index b61ea44b..f4f601cd 100644 --- a/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md +++ b/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md @@ -1,3 +1,69 @@ +--- +title: Compliance-Ready-Smart-Camera-System +description: This challenge will create and validate an Arm-based, smart camera pipeline on virtual automotive hardware—advancing safer, more developer-friendly driver-monitoring solutions for next-generation vehicles. +subjects: +- Security +- Embedded Linux +- ML +- Virtual Hardware +requires-team: +- Yes +platform: +- Mobile, Graphics, and Gaming +- Automotive +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +- Direct Support from Arm +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. 
+ + + ## Description + + **Why this is important?** + + As of June 2025, [94% of global automakers](https://newsroom.arm.com/blog/arm-zena-css-ai-defined-vehicle-compute-platform) use Arm vehicle technology. The development of compliance-ready smart camera systems is crucial for Arm as it aligns with the increasing demand for safety and reliability in automotive platforms, see the [Arm Zena compute subsystem (CSS)](https://www.arm.com/products/automotive/compute-subsystems/zena) for more details. + + **Project summary** + + Government and external regulations, particularly in automotive industries, are shaping how smart camera products are designed, implemented, and verified. This project will explore how compliance with standards like ISO 26262 influences the design of smart camera systems, such as Driver Monitoring Systems (DMS), to ensure driver attentiveness. Students or engineers will collaborate with industry-relevant platforms, such as Arm's Kronos Fixed Virtual Platform (FVP) for automotive development, to understand and propose frictionless developer experiences aligned with functional safety standards.
+ + Deliverables include: + - Developing a full end-to-end smart camera system that can be incorporated into a functional safety environment (such as automotive or medical), following the relevant standards for development (e.g., ISO 26262) + - A survey of regulatory requirements and their impact on smart camera design + - An architectural analysis integrating Arm-based systems into a compliant automotive software stack + - Recommendations for enhancing developer tools and reference software stacks to align with ISO standards + + ## Estimated Project Duration + - Estimated Time: 6+ months + - Participants: Team of 2+ + + ## Resources from Arm and Arm partners + - Learning Paths - [Automotive Development](https://www.arm.com/resources/learning-paths/automotive) + - Software - Arm Automotive Reference Platforms (e.g., [Kronos FVP](https://arm-auto-solutions.docs.arm.com/en/v1.0/overview.html)) + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. 
diff --git a/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md b/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md index f7ef7cc6..55283664 100644 --- a/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md +++ b/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md @@ -1,3 +1,55 @@ +--- +title: FPGA-Accellerator-with-DDR +description: This self-service project takes Arm Corstone-1000 from FPGA to silicon, delivering a DDR-backed, Linux-ready SoC platform that lets researchers plug in and evaluate custom accelerators with real-world performance. +subjects: +- Virtual Hardware +- Performance and Architecture +requires-team: +- No +platform: +- IoT +- Embedded and Microcontrollers +sw-hw: +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + This project aims to leverage the Corstone-1000 platform to host and support a custom research accelerator. The main deliverables include prototyping the accelerator in FPGA and then creating a physical silicon implementation using a pre-verified programmable control system. The project will provide practical experience in SoC design, FPGA prototyping, and hardware acceleration. The final output will be a functional SoC FPGA prototyping platform with DDR memory, capable of running Linux and demonstrating the feasibility and performance of the design. **To undertake this project, please reach out to SoC Labs** in [this link](https://soclabs.org/). 
+ + + ## Prerequisites + + - Languages: Verilog, SystemVerilog + - Tooling: Vivado, ModelSim, ASIC design tools + - Hardware: FPGA development board (e.g., Xilinx or Altera), Corstone-1000 platform + - IP access: Arm Academic Access member (link to get if they don't have it) + + ## Resources from Arm and our partners + + - External Community: [SoC Labs, community for Arm-based software development](https://soclabs.org/) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + ## Description This project aims to leverage the Corstone-1000 platform to host and support a custom research accelerator. The main deliverables include prototyping the accelerator in FPGA and then creating a physical silicon implementation using a pre-verified programmable control system. The project will provide practical experience in SoC design, FPGA prototyping, and hardware acceleration. The final output will be a functional SoC FPGA prototyping platform with DDR memory, capable of running Linux and demonstrating the feasibility and performance of the design. **To undertake this project, please reach out to SoC Labs** in [this link](https://soclabs.org/).
diff --git a/docs/_posts/2025-05-30-HPC-Algorithm.md b/docs/_posts/2025-05-30-HPC-Algorithm.md index 49898660..6de499fe 100644 --- a/docs/_posts/2025-05-30-HPC-Algorithm.md +++ b/docs/_posts/2025-05-30-HPC-Algorithm.md @@ -1,3 +1,65 @@ +--- +title: HPC-Algorithm +description: This self-service project is around finding an HPC algorithm and accelerating it with Arm’s SVE/SVE2 vectorization—demonstrating how next-generation Arm hardware can deliver significant, scalable performance gains. +subjects: +- Performance and Architecture +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +badges: trending +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + **Why this is important?** + + Scalable Vector Extension (SVE) is a vector extension to the A64 instruction set of the Armv8-A architecture. Armv9-A builds on SVE with the SVE2 extension. Unlike other single-instruction multiple data (SIMD) architectures, SVE and SVE2 do not define the size of the vector registers, but constrain it to a range of possible values, from a minimum of 128 bits up to a maximum of 2048 bits, in 128-bit wide units. Therefore, any CPU vendor can implement the extension by choosing the vector register size that better suits the workloads the CPU is targeting. There is growing availability of SVE-enabled hardware, such as through cloud service providers. However, not all software has taken advantage of this feature. As such, there are potential performance improvements available to software libraries and applications that add support for SVE/SVE2. + + **Project summary** + + This project aims to identify and optimize the performance of an algorithm used in high-performance computing (HPC) by leveraging Scalable Vector Extensions (SVE) instructions.
The main deliverable is an optimized version of the chosen algorithm that demonstrates performance improvements using SVE. This project will provide practical experience in HPC, vectorization, and performance optimization. The final output will be a detailed report and a functional implementation of the optimized algorithm. + + ## Prerequisites + + - Intermediate understanding of C, C++ or Fortran. + - Experience with high performance compute (HPC). + - Basic understanding of compilers such as Arm Compiler for HPC, or an autovectorising compiler such as GCC. + - Access to Arm-based servers or SVE-enabled hardware + + ## Resources from Arm and our partners + + - Learning path: [Port Code to SVE](https://learn.arm.com/learning-paths/servers-and-cloud-computing/sve/) + - Learning path: [Migrate applications that use performance libraries](https://learn.arm.com/learning-paths/servers-and-cloud-computing/using-and-porting-performance-libs/) + - Documentation: [SVE Programmers Guide](https://developer.arm.com/documentation/102476/0101/Programming-with-SVE) + + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+--- + ## Description **Why this is important?** diff --git a/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md b/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md index c1df1457..4ee36484 100644 --- a/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md +++ b/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md @@ -1,3 +1,80 @@ +--- +title: Haskell-Compiler-Windows-on-Arm +description: This self-service project brings native Glasgow Haskell Compiler support to Windows on Arm—unlocking efficient Arm-laptop builds, extending Haskell’s reach, and giving contributors hands-on experience with Arm64 code generation and runtime integration. +subjects: +- Migration to Arm +- Performance and Architecture +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + + **Why this is important?** + + The Glasgow Haskell Compiler (GHC) is the de facto standard compiler for Haskell, an advanced purely functional programming language with strong type inference and lazy evaluation. This project aims to **port GHC to support Windows on Arm (WoA)**, a platform that is increasingly relevant with the rise of Arm-powered laptops and developer kits. Arm anticipates more original equipment manufacturers (OEMs) to be available in the coming years. + + + **Project summary** + + Currently, GHC lacks robust support for WoA, hindering Haskell’s reach in energy-efficient and mobile-native environments (Request for support has [previously been requested by the community](https://gitlab.haskell.org/ghc/ghc/-/issues/24603)). The goal is to bridge this gap by: + - Enabling native compilation of Haskell code via GHC on WoA. 
+ - Implementing and testing architecture-specific assembly and intrinsic functions. + - Extending the GHC build system to recognize WoA environments. + - Integrating and validating linker and runtime support on Arm-based Windows systems. + + The project requires in-depth familiarity with compiler backends, calling conventions, code generation pipelines, and the use of LLVM or native code generators. Students will also gain experience in cross-compilation, Windows PE/COFF linking, and performance benchmarking on Arm CPUs. + + The work has potential for real-world deployment and academic publishing, and would be of high value to the Haskell and Arm developer ecosystems. + + --- + + ## Prequisites + + - Advanced understanding of Haskell (including Template Haskell, Core-to-STG pipeline understanding) + - Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) + - Intemediate understanding of Arm64 Assembly (AArch64) + - Comfortable using compilers such as LLVM and Clang for backend work (if using LLVM codegen) + - Access to MSYS2 / CMake / Ninja for Windows builds + + + ## Resources from Arm and our partners + + - External Documentation: [GHC Development Wiki](https://gitlab.haskell.org/ghc/ghc/-/wikis/) + - Repository: [GHC source tree](https://gitlab.haskell.org/ghc/ghc) + - External Documentation: [Linaro WoA Support Documentation](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) + + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. 
These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + diff --git a/docs/_posts/2025-05-30-Human-Centric-Robotics.md b/docs/_posts/2025-05-30-Human-Centric-Robotics.md index 5e005ce5..f936d4bf 100644 --- a/docs/_posts/2025-05-30-Human-Centric-Robotics.md +++ b/docs/_posts/2025-05-30-Human-Centric-Robotics.md @@ -1,3 +1,76 @@ +--- +title: Human-Centric-Robotics +description: This team project will build and test an Arm-based urban service robot—merging real-time navigation, vision-guided manipulation, and human interaction—and model its socioeconomic impact to show how Arm platforms can transform last-mile delivery, eldercare, or other city services. +subjects: +- ML +- Embedded Linux +- RTOS Fundamentals +requires-team: +- Yes +platform: +- Automotive +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +- Direct Support from Arm +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. + + ## Description + + **Why this is important?** + + Arm is transitioning from traditional IP to providing platforms for the AI era, please see a recent [news post](https://newsroom.arm.com/news/new-arm-product-naming-architecture) for more details. This project is crucial for Arm as it showcases the versatility of Arm-based platforms in real-world applications, enhancing their relevance in the robotics sector. 
Furthermore, the insights gained from this project can inform future developments and partnerships and share future platforms related to robotics. + + **Project Summary** + + This project challenges students to design, build, and evaluate a human-centric robotic system for urban deployment using Arm-based compute platforms such as Raspberry Pi 5, NVIDIA Jetson Orin Nano, etc. The primary focus is on deploying prototypes in a controlled campus environment for applications like last-mile delivery, eldercare assistance, or smart waste collection. + + + Participants will integrate real-time navigation, object manipulation, and human-interaction modules using state-of-the-art computer vision and sensor fusion frameworks. The second phase involves simulating or evaluating the robot’s impact on urban workflows and labor markets, including surveys or socioeconomic modeling techniques (e.g., system dynamics or agent-based simulation). + + Potential Deliverables include: + - A working prototype running on an Arm-based platform + - Software stack (navigation, ML inference, interaction logic) + - Field evaluation results & UX data (e.g., survey or usage logs) + - Report of development process and considerations when prototyping an end-user product. + - A socioeconomic impact report using modeling or simulation techniques + + *Note: Arm does not offer direct channels to municipalities or public testing environments. Projects should focus on campus deployments, simulated environments (e.g., Gazebo).* + + ## Estimated Project Duration + + 6+ months + Team size: 2+ participants + + ## Prerequisites + + - **Languages**: Familiarity with an OOP language. + - **Hardware**: + - **IP/Cloud Access**: + - Any cloud service provider with Arm-based instances (for model training or data analysis) + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
+ + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge. ## Description diff --git a/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md b/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md index d4081274..4dc49866 100644 --- a/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md +++ b/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md @@ -1,3 +1,57 @@ +--- +title: LLM-Benchmark-on-Arm-Server +description: This self-service project sets up a reproducible MLPerf Inference workflow to benchmark large-language-model performance across Arm server configurations—yielding hard data that guides optimization of Arm hardware and software stacks for AI workloads. +subjects: +- ML +- Performance and Architecture +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + This project aims to benchmark inference on Arm-based servers using the MLPerf Inference benchmark suite. The project spans performance analysis across different configurations of Arm-based servers. The main deliverable is a comprehensive benchmarking setup that can evaluate the performance of large language models (LLMs) on various Arm server configurations in addition to a report highlighting the performance difference and how to recreate the results. 
This project will provide practical experience in benchmarking, performance analysis, and working with Arm-based server architectures. The final output will be a detailed report and a functional benchmarking infrastructure that can be used for further research and development. + + + ## Prerequisites + + - Intermediate understanding of Python and C++ + - Intermediate understanding of ML frameworks such as MLPerf, TensorFlow and PyTorch + - Access to physical Arm-based server or access to cloud service providers + + ## Resources from Arm and our partners + + - Repository: [MLPerf Inference ](https://github.com/mlcommons/inference) + - External Documentation: [MLPerf Inference Benchmark Suite](https://mlcommons.org/en/inference-datacenter-20/) + - Blog: [Arm Server inference performance](https://community.arm.com/arm-community-blogs/b/servers-and-cloud-computing-blog/posts/machine-learning-inference-on-aws-graviton3) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + ## Description This project aims to benchmark inference on Arm-based servers using the MLPerf Inference benchmark suite. The project spans performance analysis across different configurations of Arm-based servers. 
The main deliverable is a comprehensive benchmarking setup that can evaluate the performance of large language models (LLMs) on various Arm server configurations in addition to a report highlighting the performance difference and how to recreate the results. This project will provide practical experience in benchmarking, performance analysis, and working with Arm-based server architectures. The final output will be a detailed report and a functional benchmarking infrastructure that can be used for further research and development. diff --git a/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md b/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md index a2a47c22..eb906318 100644 --- a/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md +++ b/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md @@ -1,3 +1,73 @@ +--- +title: Machine-Learning-on-AWS-Graviton +description: This self-service project ports and tunes OpenSora text-to-video transformers on AWS Graviton CPUs—showcasing cost-efficient, quantized, CPU-only inference pipelines and guiding best-practice optimization for Arm-based cloud AI workloads. +subjects: +- ML +- Migration to Arm +- Performance and Architecture +requires-team: +- No +platform: +- Servers and Cloud Computing +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + ## Description + + **Why is this important?** + + This project investigates the deployment and optimization of text-to-video transformer models on Arm-based instances, leveraging CPU-only execution for cost-effective and scalable inference. Vision Transformers, though typically run on GPUs, are increasingly desire to operate in resource-constrained environments for power efficiency. 
+ + + **Project Summary** + + The aim of this project is to port, benchmark, and optimize a pre-trained ViT model (e.g., OpenSora) on Arm-based instances. This could include post-training quantization and investigation into how to speed up performance. Students will explore efficiency techniques such as INT8 quantization, refactoring of expensive operations, and memory-efficient transformer kernels, and compare results across GPU and CPU platforms. Deliverables include a reproducible inference pipeline and a technical report outlining bottlenecks and optimization strategies. + + ## Prerequisites + + - Intermediate understanding of Python. + - Understanding of transformer architectures, vision transformer architectures and inference optimization + - Experience using PyTorch or ONNX Runtime (CPU execution provider) + - Experience with libraries such as Hugging Face Transformers, torchvision + - Access to Arm-based instances such as AWS Graviton3/Graviton4 (`c7g`, `m7g`, or `r7g`) + - Familiarity with Linux, Docker, and cloud environments + + + ## Resources from Arm and our partners + + + - Learning Paths: [Arm AI Learning Paths](https://learn.arm.com/tag/ml) + - Repository: [AWS Machine Learning Guide](https://github.com/aws/aws-graviton-getting-started/tree/main/machinelearning) + - Blog: [AWS SageMaker](https://aws.amazon.com/blogs/machine-learning/run-machine-learning-inference-workloads-on-aws-graviton-based-instances-with-amazon-sagemaker/) + - External Documentation: [OpenSora Documentation](https://github.com/hpcaitech/Open-Sora) + - Repository: [GGML library](https://github.com/ggml-org/ggml) + + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). 
+ + ## Benefits + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + ## Description diff --git a/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md b/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md index 7e0d3237..f744300f 100644 --- a/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md +++ b/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md @@ -1,3 +1,79 @@ +--- +title: Processor-in-the-Loop-Automotive +description: Verify a Simulink automotive controller by running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor. +subjects: +- Embedded Linux +- RTOS Fundamentals +- Virtual Hardware +requires-team: +- No +platform: +- Laptops and Desktops +- Automotive +- Embedded and Microcontrollers +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + + **Why this is important** + + Modern automotive software development requires early verification of embedded software to meet performance and safety standards. Engineers must validate control algorithms long before physical ECUs are available. Processor in the Loop (PIL) verification bridges this gap by executing auto-generated embedded C code on a virtual processor while checking for functional equivalence and timing compliance. 
This challenge mirrors the real-world V-Model verification process used by OEMs and Tier 1 suppliers, offering hands-on experience in model-based design, embedded code generation, and in-the-loop testing on a virtual Arm Cortex M7 core. Modern vehicles demand rigorous, early-stage verification of embedded software to satisfy safety, performance and homologation requirements. Waiting for physical ECUs delays feedback and drives up cost; virtual processor testing closes this gap. + + **Project summary** + + Start with a prebuilt Simulink automotive control model and drive it through a complete model-based development and verification workflow. This includes defining detailed software requirements, designing test scenarios, generating C code from the controller subsystem, running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor, analyzing execution time, and publishing a complete verification report. + + ## Prerequisites + + - [MATLAB & Simulink License](https://uk.mathworks.com/pricing-licensing.html?prodcode=ML&intendeduse=edu) + - Familiarity with C/C++, Simulink, Stateflow and Embedded Coder + - Familiarity with Processor-in-the-Loop (PIL), Code Profile Analyzer + - Understanding of automotive software development such as V-Model lifecycle methodology. 
+ + + ## Resources from Arm and our partners + + - Built-in Simulink Automotive Example: [Automatic Climate Control](https://www.mathworks.com/help/simulink/slref/simulating-automatic-climate-control-systems.html) + - Built-in Simulink Automotive Example: [Tire Pressure Monitoring System (TPMS)]( https://www.mathworks.com/help/simulink/ug/wirelesss-tire-pressure-monitoring-system-with-fault-logging.html) + - Built-in Simulink Automotive Example: [Anti-Lock Braking System (ABS)]( https://www.mathworks.com/help/simulink/slref/modeling-an-anti-lock-braking-system.html) + - Define Requirements: [Requirements Toolbox™](https://www.mathworks.com/products/requirements-toolbox.html) + - Code Generation: [MathWorks Embedded Coder®](https://uk.mathworks.com/products/embedded-coder.html) + - Model-in-the-Loop Test: [Simulink Test™](https://www.mathworks.com/help/sltest/index.html?s_tid=CRUX_lftnav) + - Measure Code Coverage: [Simulink Coverage™](https://www.mathworks.com/help/slcoverage/index.html) + - Hardware Implementation: [Arm Cortex M7 (Fast Model)](https://developer.arm.com/Tools%20and%20Software/Fast%20Models), [Embedded Coder Support Package for Arm Cortex M Fast Models]( https://www.mathworks.com/hardware-support/arm-cortex-m.html) + - Conduct Execution Profiling: [Code Profile Analyzer](https://www.mathworks.com/help/ecoder/ref/codeprofileanalyzer-app.html) + - Perform Static Code Analysis: [Polyspace®](https://www.mathworks.com/products/polyspace.html) + - Documentation: [MATLAB and Simulink for Verification and Validation](https://www.mathworks.com/solutions/verification-validation.html) + - Documentation: [ARM Cortex-M Support from Embedded Coder]( https://www.mathworks.com/hardware-support/arm-cortex-m.html) + - Documentation: [Arm Fast Models](https://uk.mathworks.com/products/connections/product_detail/arm-fast-models.html) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm 
Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + diff --git a/docs/_posts/2025-05-30-Quantisation-Aware-Training.md b/docs/_posts/2025-05-30-Quantisation-Aware-Training.md index ca68956a..52de2ef9 100644 --- a/docs/_posts/2025-05-30-Quantisation-Aware-Training.md +++ b/docs/_posts/2025-05-30-Quantisation-Aware-Training.md @@ -1,3 +1,71 @@ +--- +title: Quantisation-Aware-Training +description: This self-service project applies PyTorch quantization-aware training to compress and accelerate vision models for Arm-powered Android devices—enabling real-time, on-device AI while sharing the resulting lightweight models with the Hugging Face community. +subjects: +- ML +- Performance and Architecture +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- Mobile, Graphics, and Gaming +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + This open-ended project invites students to explore **Quantization-Aware Training (QAT)** with PyTorch to optimize computer vision models for **Arm-based mobile devices** (e.g., Android smartphones). 
+ + The project centers on training a model using a **non-restrictively licensed dataset** and deploying it either on **Arm-powered mobile devices** (leveraging Android Neural Networks API ) + + Students will apply QAT to maintain accuracy while reducing model size and inference latency, making it suitable for real-time applications like: + - Sign language recognition for accessibility. + - Visual anomaly detection in manufacturing. + - Personal health and activity monitoring from camera feeds. + + The project encourages referencing work by contributing **optimized and quantized models for Arm platforms** on HuggingFace. The final quantized model will be **uploaded to HuggingFace** and may be submitted for listing in the [Arm on HuggingFace space](https://huggingface.co/Arm), encouraging open, community-supported contributions. + + ## Prequisites + + - **Languages**: Familiar with Python, pytorch and Java/Kotlin (if Android). + - **Frameworks**: Intermediate understanding of PyTorch + - **Tooling**: PyTorch Lightning, Android Studio + - **Hardware Options**: + - Android phone with Arm Cortex-A CPU or simulator through Android Studio. + - **Deployment Targets**: + - Android + + ## Resources from Arm and our partners + + - Documentation: [Quantization in PyTorch](https://pytorch.org/docs/stable/quantization.html) + - Blog: [Google Media Pipe](https://android-developers.googleblog.com/2024/10/bring-your-ai-model-to-android-devices.html) + - Datasets: [Hugging Face Datasets](https://huggingface.co/docs/datasets/en/index) + - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). 
+ + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + ## Description This open-ended project invites students to explore **Quantization-Aware Training (QAT)** with PyTorch to optimize computer vision models for **Arm-based mobile devices** (e.g., Android smartphones). diff --git a/docs/_posts/2025-05-30-R-Arm-Community-Support.md b/docs/_posts/2025-05-30-R-Arm-Community-Support.md index cab3563e..f06502e0 100644 --- a/docs/_posts/2025-05-30-R-Arm-Community-Support.md +++ b/docs/_posts/2025-05-30-R-Arm-Community-Support.md @@ -1,3 +1,87 @@ +--- +title: R-Arm-Community-Support +description: This self-service project boosts the R ecosystem on Windows on Arm by identifying unsupported packages, upstreaming fixes, and automating builds—so data scientists can run their workflows natively on fast, efficient Arm64 laptops and desktops. +subjects: +- Performance and Architecture +- Migration to Arm +- Libraries +requires-team: +- No +platform: +- Laptops and Desktops +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + + **Why this is important?** + + Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. 
As such, developers will expect packages to be available for WoA so that downstream applications can more easily build for WoA platforms. + + **Project summary** + + + This project aims to significantly enhance the support for running **R packages on Windows 11 for Arm64 (WoA)** by identifying and contributing bug fixes / improvements to the relevant parts of the R community (e.g., CRAN/Bioconductor packages or even R Core / Rtools etc.). The project’s goals include: + + + - **Identifying CRAN and Bioconductor packages** lacking Windows/Arm64 support. + - **Proposing and testing patches upstream** for R packages that fail to build or run on WoA. + - **Engaging with the R development community** via the Windows Special interest group, [R-SIG-windows](https://stat.ethz.ch/mailman/listinfo/r-sig-windows), [R-Package-Devel](https://stat.ethz.ch/mailman/listinfo/r-package-devel) mailing list or the informal [R Contributors Slack](https://contributor.r-project.org/slack) and following the [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) to submit high-quality patches. + - **Reporting new issues**, requesting comments on proposed patches, documenting process via Bugzilla and reviewing existing issues through the [bug tracker](https://bugs.r-project.org/) + - **Tracking CI coverage** for recently announced public preview of [Windows11-Arm64 GitHub-hosted runners](https://github.blog/changelog/2025-04-14-windows-arm64-hosted-runners-now-available-in-public-preview/) and potentially proposing GitHub Actions or GitLab CI templates to automate WoA builds. + + Stretch Objectives: + + - **Identifying, Analyzing and fixing compatibility issues** in base R and Rtools for the Windows/Arm64 environment. This may involve waiting for improved upstream support from GCC for Windows-AArch64. A summary and progress is [available here](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/28802842658/MinGW+GNU+Toolchain). 
+ + The deliverables include: + + - Patches, request for comments and bug reports the highest impact packages + - A curated list of packages with proposed WoA support status + - A short technical write-up describing the contributions and challenges + + ## Prequisites + + - Intermediate understanding of the R language + - Intermediate understanding of Rtools, Git and Docker for cross-compilation. + - Basic understanding or willingness to learn Bugzilla, Windows 11 operating system and GitHub CI/CD. + - Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments). + + ## Resources from Arm and our partners + + - Documentation: [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) + - Documentation: [R Bugzilla](https://bugs.r-project.org/) + - Documentation: [Rtools for Windows](https://cran.r-project.org/bin/windows/Rtools/) + - Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/) + - Documentation: Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors , who are part of the Arm Developer program and the R community. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + diff --git a/docs/_posts/2025-05-30-Real-Time-Image-Classification.md b/docs/_posts/2025-05-30-Real-Time-Image-Classification.md index 3bd0dbdb..1688a849 100644 --- a/docs/_posts/2025-05-30-Real-Time-Image-Classification.md +++ b/docs/_posts/2025-05-30-Real-Time-Image-Classification.md @@ -1,3 +1,67 @@ +--- +title: Real-Time-Image-Classification +description: This self-service project trains, quantizes, and CMSIS-NN-deploys a CNN to achieve real-time image classification on an Arm Cortex-M board—demonstrating low-power, edge-ready AI on microcontrollers. +subjects: +- ML +- Performance and Architecture +requires-team: +- No +platform: +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + This project aims to develop a real-time image classification system using Convolutional Neural Networks (CNN) on an Arm Cortex-M microcontroller with CMSIS-NN. The main deliverables include training a CNN model on a custom dataset, quantizing the model to deploy it on resource-constrained devices, and transforming the model into a C format to compile and run on the microcontroller. The project will provide practical experience in running AI models on edge devices and optimizing AI models for efficient performance. The final output will be a functional image classification system capable of real-time processing on a microcontroller. + + + ## Prequisites + + - Languages: Python, ML framework experience (TensorFlowLite for microcontroller or Pytorch / Executorch), Embedded programming in C. 
+ - Tooling: + - TensorFlow Lite + - CMSIS-NN + - Keil MDK + - Hardware: + - Arm Cortex-M based microcontroller development board and compatible camera module. + - Access to hardware suitable for training neural networks + + ## Resources from Arm and our partners + + - Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) + - Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) + - Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) + - Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+--- + diff --git a/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md b/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md index 74693b55..46430340 100644 --- a/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md +++ b/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md @@ -1,3 +1,89 @@ +--- +title: Responsible-AI-and-Yellow-Teaming +description: This self-service project equips teams with a YellowTeamGPT workflow that probes Arm-based AI products for unintended impacts—turning responsible-AI stress-testing into a core step of the development cycle. +subjects: +- ML +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + + **Why this is important?** + + AI products are becoming more capable by the day. But unless we think carefully about how we build AI systems, we risk amplifying harm as fast as we’re scaling performance. Even AI products that seem successful in narrow metrics like `number of users` and `engagement` can lead to a degradation of user trust in your company, reputational risk, and drive negative societal outcomes. A more systematic product development approach is necessary for this next generation of tech to ensure we get the benefits of AI without the downsides. + + **Project summary** + + This project introduces "Yellow Teaming", a structured methodology for stress-testing AI products by exploring the full spectrum of consequences when products succeed, not just fail. Students will create a YellowTeamGPT to analyze their Arm-based product concept and apply the learnings to make their product better. 
This exercise is an excellent way for software developers, product managers, and designers to elevate their products through thoughtful design choices above a crowded competitive landscape. + + The assistant will be integrated into your product development workflow (e.g. product brainstorming, feature planning reviews, coding sessions) to aid software teams in surfacing unintended effects of new product features on your company, your users, and society. Participants can implement their YellowTeamGPT using any LLM, from a private Llama3.1-8B model on an AWS instance (tutorial linked below) to a public ChatGPT/Claude/other chatbot. Participants can also Yellow Team without an LLM by applying the methodology themselves and documenting their analysis. Analysis can be documented as product design documents, sprint retrospectives, Git-based code reviews...anything that shows the results of their thoughtful design practices. + + Key Objectives of Your Project + - Collect Real-World Applications: Gather detailed accounts of AI projects developed using Yellow Teaming principles on Arm-based systems. + - Showcase Responsible AI Practices: Highlight how developers anticipate and address potential societal and ethical impacts of their AI solutions. + - Promote Arm-Based AI Development: Demonstrate the capabilities and advantages of deploying AI applications on Arm architectures, such as AWS Graviton processors or smartphones. + - Yellow Teaming Implementation: A detailed account of how Yellow Teaming was applied, including the tools/prompts used to facilitate analysis, identification of unintended consequences, strategies to mitigate negative product impacts, and/or new net-positive features ideated. + + + ## Prequisites + + If deploying a private Llama model -> + - **Hardware**: + - Access to an Arm-based cloud instance, for example Arm-based Graviton4 processors. 
+ - **Software**: + - PyTorch and Hugging Face account + - `torchchat` repo and dependencies + - Hugging Face CLI for LLM download + - Git, Python 3.10+, and various common build essentials (e.g., `make`, `g++`) + - **Skills**: + - Proficiency in Python and PyTorch + - [Hugging Face account](https://huggingface.co/) + - Understanding of LLMs and prompting techniques + + If using a public LLM -> + - **Hardware**: + - None needed + - **Software**: + - Access to a public LLM + - **Skills**: + - Understanding of LLMs and prompting techniques + + ## Resources from Arm and our partners + + - External Course: [Mitigating Harmful Consequences course module by the Center for Humane Technology](https://www.humanetech.com/course) + - Blog: [Build Responsible AI products with your own Yellow Teaming LLM](https://pytorch.org/blog/build-responsible-ai-products-with-your-own-yellow-teaming-llm/) + - Learning Paths: [AI Learning Paths](https://learn.arm.com/tag/ml) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+--- + ## Description diff --git a/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md b/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md index ed06aac1..26f0be37 100644 --- a/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md +++ b/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md @@ -1,3 +1,61 @@ +--- +title: Sentiment-Analysis-Dashboard +description: This self-service project builds a web-scraping, LLM-powered dashboard that tracks and visualizes sentiment trends across semiconductor-industry news, giving stakeholders a real-time pulse on market mood and emerging themes. +subjects: +- ML +- Web +- Databases +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- Mobile, Graphics, and Gaming +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + This project aims to develop a sentiment analysis dashboard for keywords related to the semiconductor industry. The main deliverable is a web scraping script that gathers text data from various semiconductor news sites. Example sites are [SemiconductorEngineering.com](https://semiengineering.com/), [IEEE Spectrum](https://spectrum.ieee.org/), [EETimes](https://www.eetimes.com/tag/semiconductors/), [SemiconductorDigest](https://www.semiconductor-digest.com/), [SemiconductorToday](https://semiconductor-today.com/), [Financial Times - Semiconductors](https://www.ft.com/semiconductors), [Bezinga Semiconductors](https://www.benzinga.com/topic/semiconductors). + + This data will then be processed through a sentiment analysis LLM (Large Language Model) to determine the sentiment of the content and how it varies over time. The project will provide practical experience in web scraping, data processing, databases and using LLMs for sentiment analysis. 
The final output will be a functional dashboard that displays the sentiment analysis results in an easy-to-understand format. + + ## Prerequisites + + - Languages: Intermediate understanding of Python + - Hardware: Access to a computer with internet connectivity and access to cloud instances + + ## Resources from Arm and our partners + + You are free to choose your own implementation details. The resources below are examples to get started. + + - External Documentation: [BeautifulSoup](https://pypi.org/project/beautifulsoup4/) + - Learning Paths: [Deploy MariaDB](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/) + + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + ## Description This project aims to develop a sentiment analysis dashboard for keywords related to the semiconductor industry. The main deliverable is a web scraping script that gathers text data from various semiconductor news sites.
Example sites are [SemiconductorEngineering.com](https://semiengineering.com/), [IEEE Spectrum](https://spectrum.ieee.org/), [EETimes](https://www.eetimes.com/tag/semiconductors/), [SemiconductorDigest](https://www.semiconductor-digest.com/), [SemiconductorToday](https://semiconductor-today.com/), [Financial Times - Semiconductors](https://www.ft.com/semiconductors), [Benzinga Semiconductors](https://www.benzinga.com/topic/semiconductors). diff --git a/docs/_posts/2025-05-30-Smart-Voice-Assistant.md b/docs/_posts/2025-05-30-Smart-Voice-Assistant.md index 2c0b8567..830a5837 100644 --- a/docs/_posts/2025-05-30-Smart-Voice-Assistant.md +++ b/docs/_posts/2025-05-30-Smart-Voice-Assistant.md @@ -1,3 +1,64 @@ +--- +title: Smart-Voice-Assistant +description: This project trains and deploys a TinyML keyword-spotting model on an Arm Cortex-M55/U55 board to create a low-power voice assistant that recognizes spoken commands and quantifies its accuracy, latency, and energy use. +subjects: +- ML +requires-team: +- No +platform: +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + This project aims to develop a simple voice assistant that can recognize spoken commands such as “turn on the light,” “play music,” and other similar tasks. The voice assistant will be able to control peripheral devices accordingly. The main objective is to implement your design on a low-power microcontroller Cortex-M55/U55 to create low-level machine learning applications. You should look to assess metrics such as the accuracy, power and computation time. Please refer to our [Machine Learning keyword spotting example](https://github.com/Arm-Examples/mlek-cmsis-pack-examples) as a reference.
+ + The deliverables include a functional voice assistant capable of understanding and executing basic commands, along with documentation detailing the development process and the performance of the system. + + ## Prerequisites + + - Languages: Python, C++, Embedded C + - Tooling: TensorFlow Lite for Microcontrollers, Keil MDK + - Hardware: Cortex-M55/U55 development board (or Corstone Virtual Platform), microphone, peripheral devices (e.g., lights, speakers) + + + ## Resources from Arm and our partners + + - Learning paths: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) + - Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/) + - Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/) + - Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books) + - Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+--- + diff --git a/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md b/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md index 1a5413cf..29dc94cf 100644 --- a/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md +++ b/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md @@ -1,3 +1,81 @@ +--- +title: SpecINT2017-benchmarking-on-Arm64 +description: This self-service project profiles SPEC CPU2017 on Arm64 servers—using GCC, Clang, and Arm Compiler with top-down analysis—to reveal how compiler choices and Arm micro-architectural features impact execution time, energy efficiency, and performance bottlenecks. +subjects: +- Performance and Architecture +- Migration to Arm +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ### Description + + **Why this is important?** + + SPEC is an industry standard for assessing the performance of both single-threaded and multi-threaded applications across various data types and compilers. This synthetic workload accurately represents real-world tasks and serves as a common metric for evaluating platform choices. Therefore, it is essential to comprehend the inner workings of these benchmarks to identify which microarchitectural features can enhance end-user applications. + + **Project Summary** + + This project aims to replicate the characterisation study from "SPEC CPU2017: Performance, Event, and Energy Characterization on the Core i7-8700K" on an Arm64 platform (e.g., Ampere Altra, AWS Graviton) using different compilers and performance profiling tools. The study will analyze how compiler optimizations and architectural features affect execution time, energy efficiency, and instruction throughput on Arm-based server processors. 
Deliverables include a comprehensive performance analysis report, reproducible benchmarking scripts, and a dataset comparing performance across different configurations. The report should locate microarchitectural bottlenecks using the [top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology), compiler performance and recommendations on how to improve performance. + + ## Prerequisites + + Hardware: Access to Arm64-based server (Ampere Altra, AWS Graviton, Raspberry Pi for preliminary tests) + + Software: Familiarity with performance engineering and OOP in a language such as C++. + + Compilers: GCC, LLVM/Clang, Arm Compiler for Linux + + Profiling Tools: perf, Arm Performance Libraries + + Workloads: SPEC CPU2017 (academic license required), custom workloads + + ## Resources from Arm and our partners + + - Research Article: [Characterisation Paper on x86](https://research.spec.org/icpe_proceedings/2019/proceedings/p111.pdf) + + - Whitepaper: [Arm Top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology) + + - Install Guide: [Install Perf for Linux on Arm](https://learn.arm.com/install-guides/perf/) + + - Documentation: [Arm Performance Counters](https://developer.arm.com/documentation/ddi0379/a/Introduction/Performance-counters) + + - Documentation: [SPEC CPU2017 ](https://www.spec.org/cpu2017/results/) + + - Documentation: [GNU compilers](https://gcc.gnu.org/) + + - Software Download: [Arm compiler for Linux](https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program.
+ + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + ### Description **Why this is important?** diff --git a/docs/_posts/2025-05-30-Write-A-Learning-Path.md b/docs/_posts/2025-05-30-Write-A-Learning-Path.md index c70c695a..b2117dc6 100644 --- a/docs/_posts/2025-05-30-Write-A-Learning-Path.md +++ b/docs/_posts/2025-05-30-Write-A-Learning-Path.md @@ -1,3 +1,60 @@ +--- +title: Write-A-Learning-Path +description: This project lets students turn their Arm expertise into a publish-ready Learning Path—creating a structured, hands-on tutorial that guides others through a complete, hardware-friendly build and showcases the author’s teaching skills. +subjects: +- Libraries +- Web +requires-team: +- No +platform: +- Servers and Cloud Computing +- Laptops and Desktops +- Mobile, Graphics, and Gaming +- Automotive +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-05-30 +license: +status: +- Hidden +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + This exciting opportunity invites students to create and submit a comprehensive learning path focused on any aspect of Arm technology. Participants will have the chance to showcase their expertise, contribute to the academic community, and potentially publish their work for a wider audience. Learning paths are structured tutorials that guide learners through a series of topics and skills. 
For example, a project could involve developing a learning path on Arm-based embedded systems, culminating in a practical demonstration of a working prototype, such as a simple temperature monitoring system using a basic microcontroller and a few sensors. This project can be easily recreated with minimal hardware and software access. We look forward to seeing your innovative and insightful submissions! + + ## Prerequisites + + - Computer with Internet Connectivity + + ## Resources from Arm and our partners + + - Documentation: [How to create a learning path](https://learn.arm.com/learning-paths/cross-platform/_example-learning-path/) + - Documentation: [Ideas for new Learning Paths](https://github.com/ArmDeveloperEcosystem/arm-learning-paths/discussions/categories/ideas-for-new-learning-paths) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+--- + diff --git a/docs/_posts/2025-05-30-projects.md b/docs/_posts/2025-05-30-projects.md index aa515602..405c8ffc 100644 --- a/docs/_posts/2025-05-30-projects.md +++ b/docs/_posts/2025-05-30-projects.md @@ -1,3 +1,14 @@ +--- +title: projects +filter: project +publication-date: 2025-05-30 +layout: article +full_description: |- + **Arm Developer Labs** is a repository of software and hardware project suggestions sourced from internal Arm teams and our network of eco-system partners but framed in way to be immediately accessible and useful to both academics and professional software developers alike. + + Filter or keyword search below. You can take a project as is, adapt it to your circumstances or simply take inspiration from it. Each project link shows what contextual collateral and resources are available. To get benefits from Arm, you need to tell us about your work. Read more on the [homepage](https://arm-university.github.io/Arm-Developer-Labs/). +--- + **Arm Developer Labs** is a repository of software and hardware project suggestions sourced from internal Arm teams and our network of eco-system partners but framed in way to be immediately accessible and useful to both academics and professional software developers alike. Filter or keyword search below. You can take a project as is, adapt it to your circumstances or simply take inspiration from it. Each project link shows what contextual collateral and resources are available. To get benefits from Arm, you need to tell us about your work. Read more on the [homepage](https://arm-university.github.io/Arm-Developer-Labs/). 
\ No newline at end of file diff --git a/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md b/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md index bfed1a05..3697a54d 100644 --- a/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md +++ b/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md @@ -1,3 +1,68 @@ +--- +title: C-Based-Application-from-Scratch +description: This self-service project goes back to the fundamentals. The challenge is to develop an application of your choice but you are only permitted to use the C language with as few dependencies as possible. +subjects: +- Performance and Architecture +- Libraries +requires-team: +- No +platform: +- IoT +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-07-11 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + **Why this is important?** + + Modern, higher-level managed languages such as Java and Python enable developers to stand up complex applications quickly. However, this often comes at the expense of configurability and performance. Developing low-level skills is valuable to unlocking performance but also is crucial to understanding the principal mechanisms in computer architecture and how to write programs that leverage available hardware features. At Arm, we have a history of developing low-level software components, such as compilers, as well as contributing to software projects, such as the Linux Kernel. The demand for developers with these skills is high. + + **Project Summary** + + This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files.
+ Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count. + + Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This is excluding those provided by the C standard library. You are also free to use any suitable compiler. If you use a different language or a dependency written in another language, please include a short justification in your submission. + + Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions. + + + ## Prerequisites + + - Access to a Raspberry Pi device (any generation) + - Intermediate Understanding of the C language + + + ## Resources from Arm and our partners + + - External Resource: [Getting started with your Raspberry Pi](https://www.raspberrypi.com/documentation/computers/getting-started.html). + - External Resource: [C language documentation](https://en.cppreference.com/w/c/language.html) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + + ### Previous Submissions + 1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git). + 2. 
[VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game). +--- + ## Description **Why this is important?** diff --git a/docs/_posts/2025-08-28-NPC-LLM-Runtime.md b/docs/_posts/2025-08-28-NPC-LLM-Runtime.md index e0dc8cdb..0cc26757 100644 --- a/docs/_posts/2025-08-28-NPC-LLM-Runtime.md +++ b/docs/_posts/2025-08-28-NPC-LLM-Runtime.md @@ -1,3 +1,79 @@ +--- +title: NPC-LLM-Runtime +description: This self-service project explores novel ways of integrating Large Language Models (LLMs) into real-time gameplay to drive dynamic Non-Playable Character (NPC) interactions. +subjects: +- ML +- Gaming +- Graphics +requires-team: +- No +platform: +- AI +- Mobile, Graphics, and Gaming +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-08-28 +license: +status: +- Published +badges: trending +donation: +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + **Why is this important?** + + Large Language Models (LLMs) are redefining the possibilities for interactive game experiences, especially in how players engage with Non-Playable Characters (NPCs). Traditional NPCs rely on pre-scripted dialogue trees and behavior systems, which limit immersion and adaptability. By leveraging LLMs locally, we can move beyond static interactions and create dynamic, real-time NPC behaviors that respond intelligently to players. This not only enhances immersion but also demonstrates the feasibility of running advanced AI directly on-device without reliance on cloud infrastructure. Such innovation could shape the future of gaming, particularly in mobile platforms where performance and autonomy are critical. + + **Project Summary** + + This project challenges you to propose and implement novel methods of using LLMs to control or interact with NPCs during runtime. Beyond dialogue systems, the focus is on new, creative mechanisms that showcase the potential of LLM-driven NPCs in real-time gameplay. 
Your implementation should demonstrate a working prototype running locally at a stable 30–60 FPS, ensuring both responsiveness and playability. While achieving mobile deployment is the ultimate goal, a PC/laptop demo will also be accepted as proof of concept. The final submission should include source code, reproducible build instructions, supporting documentation, and a short demo video. + + To qualify, your submission should include, where possible: + + - Source code (with clear documentation and build instructions) + - A reproducible setup (e.g. scripts, datasets, or dependencies) + - A supporting document describing the project and design decisions + - High-quality images and a video (≤ 3 minutes) demonstrating the demo in action + + Please ensure your contribution contains no confidential material and that you have rights to share it, especially if affiliated with an academic or commercial institution. + + ## Prequisites + - Familiarity with a modern game engine (e.g., Unity, Unreal Engine, Godot) + - Experience with integrating machine learning models into real-time applications + - Knowledge of C++, Python, or a game scripting language + - Understanding of on-device ML optimization (e.g., quantization, pruning, mobile deployment) + - Access to hardware capable of running LLM inference locally (PC or mobile) + + + ## Resources from Arm and our partners + + - Blog: [The Future of Interaction with Mobile Game Characters](https://dl.acm.org/doi/10.1145/3641234.3671019) + - Blog: [The Future is Here: Verbal Interaction with NPCs on Mobile](https://doi.org/10.1145/3664294.3664365) + - Blog: [Google AI for game developers- Google Developers Blog](https://developers.googleblog.com/en/google-ai-for-game-developers/) + - Webinar: [Generative AI in Game Development](https://meet95924766.adobeconnect.com/pkevz158x6tp/) + - Paper: [A Survey on Large Language Model-Based Game Agents ](https://arxiv.org/abs/2404.02039) + - Paper: [Large Language Models and Games: A Survey 
and Roadmap](https://arxiv.org/abs/2402.18659) + + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + ## Description **Why is this important?** diff --git a/docs/_posts/2025-11-03-Python-Porting-Challenge.md b/docs/_posts/2025-11-03-Python-Porting-Challenge.md index 7ea05003..70521843 100644 --- a/docs/_posts/2025-11-03-Python-Porting-Challenge.md +++ b/docs/_posts/2025-11-03-Python-Porting-Challenge.md @@ -1,3 +1,75 @@ +--- +title: Python-Porting-Challenge +description: This challenge focuses on enabling Python support for Windows on Arm (WoA) to improve developer experience. While Python is widely used in research and industry, many popular packages—such as Pandas—still lack pre-built WoA binaries (win_arm64 wheels). The goal is to validate and optimise third-party packages, fix compatibility issues, and collaborate with maintainers to upstream WoA support. 
+subjects: +- Libraries +requires-team: +- No +platform: +- Laptops and Desktops +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-11-03 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + ### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility* + + ## Description + + Windows on Arm brings the power of Arm architecture to Windows users, delivering energy efficiency and performance for a wide range of devices. As adoption grows, ensuring a seamless developer experience is critical, especially for Python, one of the most widely used languages in research, education, and industry. + + This challenge is on advancing the Arm ecosystem by addressing critical gaps in software enablement. A key objective is enabling Windows on Arm (WoA) across the WoA Python package ecosystem, which involves validating and optimising 3rd party packages to ensure seamless functionality across diverse environments. + + Despite Python’s widespread adoption, many popular packages such as Pandas still lack pre-built Windows on Arm binaries (`win_arm64` wheels), creating a barrier for developers and researchers who rely on these tools for data analysis and scientific computing. If a Windows on Arm Python developer wanted to use Pandas, they would have to recompile from source, which required the correct toolchain and is not guaranteed to compile or run successfully. + + Key Objectives: + + - Python Ecosystem Enablement: Turn at least 5 amber projects green on [Windows Arm64 Wheels](https://tonybaloney.github.io/windows-arm64-wheels/). + - Identify packages that do not have readily available `win_arm64` wheels. 
Identify any bugs or regressions when porting an application (for example, `x86` intrinsics) and create a patch that resolves issues and enables the packages to correctly build and run performantly. + - Community Collaboration: Engage with global developer communities, such as Python package maintainers, to get WoA package support upstreamed and integrated. + + + ## Prerequisites + + - Intermediate to advanced understanding of the Python language + - Some experience with creating Python packages and continuous integration testing. + - If you decide to tackle non pure-python packages that are written in other languages, you will need an intermediate understanding of the language the program was written in (e.g., Rust, Java, C++ etc.). + + ## Resources from Arm and our partners + + - External Documentation: [Methods to run Windows on Arm64](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) + - External Documentation: [Python Packages without Windows-on-Arm Wheels](https://tonybaloney.github.io/windows-arm64-wheels/) + - External Documentation: [Python Packages without Windows-on-Arm Wheels additional resource](http://www.winarm64wheels.com/) + - Learning Path: [Sampling CPython with WindowsPerf](https://learn.arm.com/learning-paths/laptops-and-desktops/windowsperf_sampling_cpython/) + - Community Post: [Python on Windows Arm64](https://discuss.python.org/t/python-on-windows-arm64/104524) + - External Documentation: [Status of Python versions](https://devguide.python.org/versions/) + - GitHub Repository: [Source code and documentation to build Python Wheels](https://github.com/pypa/cibuildwheel) + + + ## Support Level + + If you would like to request a small donation to help procure Windows on Arm hardware, please reach out to us at Arm-Developer-Lab@arm.com. + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program.
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + ### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility* diff --git a/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md b/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md index 8ec39bef..dec9bb69 100644 --- a/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md +++ b/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md @@ -1,3 +1,87 @@ +--- +title: Always-On-AI-with-Ethos-U85-NPU +description: The vision of Edge AI compute is to embed low-power intelligent sensing, perception, and decision systems everywhere. A low-power always-on-AI island continuously monitors sensory inputs to detect triggers. When a trigger is detected, it wakes up a more capable processor to carry out high-value inference, interaction, or control tasks. 
+subjects: +- ML +- Performance and Architecture +- Embedded Linux +- RTOS Fundamentals +requires-team: +- No +platform: +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-11-27 +license: +status: +- Published +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + + ## Description + + **Why is this important?** + + The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy: + - A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies. + - When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance. + + This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources. + + **Project Summary** + + Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements: + + 1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers. + 2. 
A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference. + 3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring). + + *Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. For LLM inference, consider connecting a Raspberry Pi 5 or similar.* + + Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices. + + You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved. + + Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve? + + ## What will you use? + You should either be familiar with, or willing to learn about, the following: + - Programming: Python, C++, Embedded C + - ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA. + - Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. 
Yocto) or Raspberry Pi OS + + + ## Resources from Arm and our partners + - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) + - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) + - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) + - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) + - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + diff --git a/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md b/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md index e87e5142..b7640d8a 100644 --- a/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md +++ b/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md @@ -1,3 +1,83 @@ +--- +title: Edge-AI-On-Mobile +description: Leverage the latest SME2 (Scalable Matrix Extension 2) available on the newest vivo X300 smartphones (built on Arm Lumex CSS) for advanced image/video, audio and text processing edge AI. 
Explore how SME2, via KleidiAI, enables larger matrix workloads, higher throughput, and novel applications on device without cloud connectivity required. +subjects: +- ML +- Performance and Architecture +- Libraries +requires-team: +- No +platform: +- Mobile, Graphics, and Gaming +- AI +- IoT +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-11-27 +license: +status: +- Published +badges: trending +layout: article +sidebar: + nav: projects +full_description: |- + ## Description + + ### Why is this important? + + SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device. + + [SME2](https://www.arm.com/technologies/sme2) + + The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases. + + [vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones) + + ### Project Summary + + Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2. 
+ + Example project areas: + - Real-time video semantic segmentation (e.g., background removal + AR compositing) + - Live object detection + natural-language description (text summary of what the camera sees) + - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition + - On-device lightweight LLM or encoder-only transformer processing for mobile assistants + + Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case. + + Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2. 
+ + --- + + ## Resources from Arm and our partners + + - Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile) + - Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/) + - Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/) + - Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/) + - Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/) + - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) + - Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm) + + --- + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+ + --- +--- + ## Description ### Why is this important? diff --git a/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md b/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md index ce94ec3b..b412d1e4 100644 --- a/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md +++ b/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md @@ -1,3 +1,125 @@ +--- +title: Ethos-U85-NPU-Applications +description: Push the limits of Edge AI by deploying the heaviest inference applications possible on Ethos-U85. Students will explore transformer-based and TOSA-optimized workloads that demonstrate performance levels on the next-gen of Ethos NPUs. +subjects: +- ML +- Performance and Architecture +requires-team: +- No +platform: +- IoT +- Embedded and Microcontrollers +- AI +sw-hw: +- Software +- Hardware +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-11-27 +license: +status: +- Published +badges: trending +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + ## Description + + **Why is this important?** + + The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices. + + This project challenges you to explore the boundaries of what’s possible on Ethos-U85. The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85. 
+ + [Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85) + + **Project Summary** + + Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities. + + Your project should include: + + 1. Model Deployment and Optimization + Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using: + - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance. + - The Vela compiler for optimization. + + These tools can be used to: + - Convert and visualize model graphs in TOSA format. + - Identify unsupported operators. + - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting. + - Run Vela for optimized compilation targeting Ethos-U85. + + 2. Application Demonstration + Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include: + - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification). + - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks. + - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding. + + 3. Analysis and Benchmarking + Report quantitative results on: + - Inference latency, throughput (FPS or tokens/s), and memory footprint. + - Power efficiency under load (optional). + - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits). + - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion. 
+ + --- + + ## What kind of projects should you target? + + To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria: + + - Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models. + - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms. + - High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput. + - *Example:* 512×512 semantic segmentation or multi-object detection. + - Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance. + - *Example:* large MLP heads or transformer token mixers. + - Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations. + + The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies. + + --- + + ## What will you use? + You should be familiar with, or willing to learn about: + - Programming: Python, C/C++ + - ExecuTorch or TensorFlow Lite (Micro/LiteRT) + - Techniques for optimising AI models for the edge (quantization, pruning, etc.) 
+ - Optimization Tools: + - TOSA Model Explorer + - .tflite to .tosa converter (if using Tensorflow rather than ExecuTorch) + - Vela compiler for Ethos-U + - Bare-metal or RTOS (e.g., Zephyr) + + --- + + ## Resources from Arm and our partners + - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai) + - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/) + - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm) + - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/) + - Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model) + - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/) + --- + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+--- + ## Description diff --git a/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md b/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md new file mode 100644 index 00000000..dc5c2161 --- /dev/null +++ b/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md @@ -0,0 +1,159 @@ +--- +title: Game-Dev-Using-Neural-Graphics-&-Unreal-Engine +description: Build a playable Unreal Engine 5 game demo that utilises Arm’s Neural Graphics SDK UE plugin for features such as Neural Super Sampling (NSS). Showcase near-identical image quality at lower resolution by driving neural rendering directly in the graphics pipeline. +subjects: +- ML +- Gaming +- Libraries +- Graphics +requires-team: +- No +platform: +- Mobile, Graphics, and Gaming +- Laptops and Desktops +- AI +sw-hw: +- Software +support-level: +- Self-Service +- Arm Ambassador Support +publication-date: 2025-11-27 +license: +status: +- Published +badges: trending +donation: +layout: article +sidebar: + nav: projects +full_description: |- + + + ## Description + + ### Why is this important? + + Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation. + + Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face. + + [Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology) + + Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process. 
+ + Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released. + + ### Project Summary + + Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate: + - **Near-identical visuals at lower resolution** (render low → upscale with NSS) + + Document your progress and findings and consider alternative applications of the neural technology within games development. + + Attempt different environments and objects. For example: + + - Daytime vs night + - Urban city, jungle forest, ocean floor, alien planet, building interiors + - Complex lighting and shadows + - NPCs with detailed clothing, faces, hair. Include animations. + + Make your scenes dynamic with particle effects, shadows, physics and motion. + + --- + + ## Pre-requisites + - Laptop/PC/Mobile for Android Unreal Engine game development + - Willingness to learn about games development and graphics, and the increasing use of AI in these fields. 
+ + --- + + ## Resources from Arm and partners + - Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling) + - Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works) + - Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics) + - Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/) + - Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/) + - Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/) + - Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine) + - Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines) + - Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym) + - Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/) + + --- + + ## Support Level + + This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + + ## Benefits + + Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
+ + + To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. +--- + + + +## Description + +### Why is this important? + +Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation. + +Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face. + +[Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology) + +Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process. + +Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released. + +### Project Summary + +Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate: +- **Near-identical visuals at lower resolution** (render low → upscale with NSS) + +Document your progress and findings and consider alternative applications of the neural technology within games development. + +Attempt different environments and objects. For example: + +- Daytime vs night +- Urban city, jungle forest, ocean floor, alien planet, building interiors +- Complex lighting and shadows +- NPCs with detailed clothing, faces, hair. Include animations. 
+ +Make your scenes dynamic with particle effects, shadows, physics and motion. + +--- + +## Pre-requisites +- Laptop/PC/Mobile for Android Unreal Engine game development +- Willingness to learn about games development and graphics, and the increasing use of AI in these fields. + +--- + +## Resources from Arm and partners +- Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling) +- Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works) +- Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics) +- Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/) +- Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/) +- Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/) +- Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine) +- Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines) +- Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym) +- Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/) + +--- + +## Support Level + +This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. 
If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). + +## Benefits + +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. + + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file From 6c66a652923e029af686b31574cf5b064c1ab2a1 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 12:14:48 +0000 Subject: [PATCH 92/98] Add files via upload From 2cb87e98e0592d3271082fa49e96f7602b5b9957 Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 12:22:34 +0000 Subject: [PATCH 93/98] Add files via upload From 9b1cf02cd1412fcea665ea24a641330926fe820a Mon Sep 17 00:00:00 2001 From: Joshua Marshall-law Date: Mon, 22 Dec 2025 13:25:41 +0000 Subject: [PATCH 94/98] Update article.html --- docs/_layouts/article.html | 72 +------------------------------------- 1 file changed, 1 insertion(+), 71 deletions(-) diff --git a/docs/_layouts/article.html b/docs/_layouts/article.html index 151115f2..e61bf609 100644 --- a/docs/_layouts/article.html +++ b/docs/_layouts/article.html @@ -12,83 +12,13 @@ {%- include article/top/custom.html -%}
    - - {%- comment -%} - Per-project tags/meta for project detail pages. - Uses front matter on the page: - - subjects - - platform - - sw-hw - - support-level - - status - - badges - This is safe for non-project pages; if they don't have - these fields, nothing will render. - {%- endcomment -%} - {%- if page.subjects or page.platform or page["sw-hw"] or page["support-level"] or page.status or page.badges -%} -
    - {%- if page.subjects -%} -
    - Subjects: - {%- for s in page.subjects -%} - {{ s }} - {%- endfor -%} -
    - {%- endif -%} - - {%- if page.platform -%} -
    - Platform: - {%- for p in page.platform -%} - {{ p }} - {%- endfor -%} -
    - {%- endif -%} - - {%- if page["sw-hw"] -%} -
    - SW / HW: - {%- for tag in page["sw-hw"] -%} - {{ tag }} - {%- endfor -%} -
    - {%- endif -%} - - {%- if page["support-level"] -%} -
    - Support level: - {%- for lvl in page["support-level"] -%} - {{ lvl }} - {%- endfor -%} -
    - {%- endif -%} - - {%- if page.status -%} -
    - Status: - {%- for st in page.status -%} - {{ st }} - {%- endfor -%} -
    - {%- endif -%} - - {%- if page.badges -%} -
    - Badges: - {%- for badge in page.badges -%} - {{ badge }} - {%- endfor -%} -
    - {%- endif -%} -
    - {%- endif -%} - {{ content }} {%- if page.filter == "project" -%}
    {%- assign nav_group = site.data.navigation["projects"] -%} +