diff --git a/Projects/Extended-Team-Projects/Compliance-Ready-Smart-Camera-System.md b/Projects/Extended-Team-Projects/Compliance-Ready-Smart-Camera-System.md index a7738672..eaa31070 100644 --- a/Projects/Extended-Team-Projects/Compliance-Ready-Smart-Camera-System.md +++ b/Projects/Extended-Team-Projects/Compliance-Ready-Smart-Camera-System.md @@ -57,6 +57,6 @@ Deliverables include: ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
diff --git a/Projects/Extended-Team-Projects/Human-Centric-Robotics.md b/Projects/Extended-Team-Projects/Human-Centric-Robotics.md index 9cdab1dc..d637e7db 100644 --- a/Projects/Extended-Team-Projects/Human-Centric-Robotics.md +++ b/Projects/Extended-Team-Projects/Human-Centric-Robotics.md @@ -64,6 +64,7 @@ Team size: 2+ participants ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + diff --git a/Projects/Projects/AI-Agents.md b/Projects/Projects/AI-Agents.md index 8bab6c46..48fb3fc5 100644 --- a/Projects/Projects/AI-Agents.md +++ b/Projects/Projects/AI-Agents.md @@ -19,6 +19,7 @@ license: status: - "Published" donation: + --- @@ -54,7 +55,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. @@ -63,4 +64,8 @@ To receive the benefits, you must show us your project through our [online form] 2. [AI that interprets user requests, generates circuit descriptions, creates LTSpice ASC code, and iteratively refines circuit designs using a combination of GPT-based language models, a vision analysis module, and LTSpice simulation. (Gijeong Lee, Bill Leoutsakos)](https://github.com/BillLeoutsakosvl346/ElectroNinjaRefined) -3. [AI agent to track real-time student engagement and exam performance (Jasper Wang, Sritej Tummuru, Talha Javed)](https://github.com/JasperWANG-911/AI_Agent) \ No newline at end of file + +3. [AI agent to track real-time student engagement and exam performance (Jasper Wang, Sritej Tummuru, Talha Javed)](https://github.com/JasperWANG-911/AI_Agent) + + + diff --git a/Projects/Projects/AI-Powered-Porting-Tool.md b/Projects/Projects/AI-Powered-Porting-Tool.md index a4d84c98..e474f62d 100644 --- a/Projects/Projects/AI-Powered-Porting-Tool.md +++ b/Projects/Projects/AI-Powered-Porting-Tool.md @@ -69,6 +69,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
-To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/Projects/Projects/AMBA-Simulator-Framework.md b/Projects/Projects/AMBA-Simulator-Framework.md index b0781fb9..3ec62ed7 100644 --- a/Projects/Projects/AMBA-Simulator-Framework.md +++ b/Projects/Projects/AMBA-Simulator-Framework.md @@ -54,6 +54,7 @@ Similar projects: ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/Projects/Projects/Academic-Trends-Dashboard.md b/Projects/Projects/Academic-Trends-Dashboard.md index 620e36ed..c42bfcc5 100644 --- a/Projects/Projects/Academic-Trends-Dashboard.md +++ b/Projects/Projects/Academic-Trends-Dashboard.md @@ -55,6 +55,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
diff --git a/Projects/Projects/Always-On-AI-with-Ethos-U85-NPU.md b/Projects/Projects/Always-On-AI-with-Ethos-U85-NPU.md index 014da0cd..bbe62a2f 100644 --- a/Projects/Projects/Always-On-AI-with-Ethos-U85-NPU.md +++ b/Projects/Projects/Always-On-AI-with-Ethos-U85-NPU.md @@ -74,7 +74,8 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + diff --git a/Projects/Projects/Architecture-Insight-Dashboard.md b/Projects/Projects/Architecture-Insight-Dashboard.md index b502173c..af255e97 100644 --- a/Projects/Projects/Architecture-Insight-Dashboard.md +++ b/Projects/Projects/Architecture-Insight-Dashboard.md @@ -63,6 +63,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
-To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/Projects/Projects/Arduino-IDE-Windows-on-Arm.md b/Projects/Projects/Arduino-IDE-Windows-on-Arm.md index a8f567d4..0202a074 100644 --- a/Projects/Projects/Arduino-IDE-Windows-on-Arm.md +++ b/Projects/Projects/Arduino-IDE-Windows-on-Arm.md @@ -67,6 +67,10 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+ + + + diff --git a/Projects/Projects/Bioinformatic-Pipeline-Analysis.md b/Projects/Projects/Bioinformatic-Pipeline-Analysis.md index 065f7642..86235285 100644 --- a/Projects/Projects/Bioinformatic-Pipeline-Analysis.md +++ b/Projects/Projects/Bioinformatic-Pipeline-Analysis.md @@ -68,6 +68,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
diff --git a/Projects/Projects/C-Based-Application-from-Scratch.md b/Projects/Projects/C-Based-Application-from-Scratch.md index 06816d0f..e9fecd70 100644 --- a/Projects/Projects/C-Based-Application-from-Scratch.md +++ b/Projects/Projects/C-Based-Application-from-Scratch.md @@ -53,7 +53,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. @@ -61,3 +61,4 @@ To receive the benefits, you must show us your project through our [online form] 1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git). 2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game). 
+ diff --git a/Projects/Projects/Edge-AI-On-Mobile.md b/Projects/Projects/Edge-AI-On-Mobile.md index 4b7f9113..acbc309a 100644 --- a/Projects/Projects/Edge-AI-On-Mobile.md +++ b/Projects/Projects/Edge-AI-On-Mobile.md @@ -20,6 +20,7 @@ publication-date: 2025-11-27 license: status: - "Published" +badges: trending --- ## Description @@ -68,9 +69,8 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. - +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. ---- \ No newline at end of file +--- diff --git a/Projects/Projects/Ethos-U85-NPU-Applications.md b/Projects/Projects/Ethos-U85-NPU-Applications.md index f9f7015d..b5107e2f 100644 --- a/Projects/Projects/Ethos-U85-NPU-Applications.md +++ b/Projects/Projects/Ethos-U85-NPU-Applications.md @@ -20,6 +20,7 @@ publication-date: 2025-11-27 license: status: - "Published" +badges: trending donation: --- @@ -111,7 +112,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. 
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/Projects/Projects/FPGA-Accellerator-with-DDR.md b/Projects/Projects/FPGA-Accellerator-with-DDR.md index f5050f41..220c0317 100644 --- a/Projects/Projects/FPGA-Accellerator-with-DDR.md +++ b/Projects/Projects/FPGA-Accellerator-with-DDR.md @@ -42,6 +42,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
\ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/Projects/Projects/Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md b/Projects/Projects/Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md index 53eca7a5..90fe0d68 100644 --- a/Projects/Projects/Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md +++ b/Projects/Projects/Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md @@ -21,6 +21,7 @@ publication-date: 2025-11-27 license: status: - "Published" +badges: trending donation: --- @@ -84,7 +85,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
diff --git a/Projects/Projects/HPC-Algorithm.md b/Projects/Projects/HPC-Algorithm.md index 8c772899..fa232389 100644 --- a/Projects/Projects/HPC-Algorithm.md +++ b/Projects/Projects/HPC-Algorithm.md @@ -18,6 +18,7 @@ publication-date: 2025-05-30 license: status: - "Published" +badges: trending donation: --- @@ -53,6 +54,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
diff --git a/Projects/Projects/Haskell-Compiler-Windows-on-Arm.md b/Projects/Projects/Haskell-Compiler-Windows-on-Arm.md index 5c61c98c..f06a8ae1 100644 --- a/Projects/Projects/Haskell-Compiler-Windows-on-Arm.md +++ b/Projects/Projects/Haskell-Compiler-Windows-on-Arm.md @@ -68,6 +68,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + diff --git a/Projects/Projects/LLM-Benchmark-on-Arm-Server.md b/Projects/Projects/LLM-Benchmark-on-Arm-Server.md index 7855110c..ad9c8868 100644 --- a/Projects/Projects/LLM-Benchmark-on-Arm-Server.md +++ b/Projects/Projects/LLM-Benchmark-on-Arm-Server.md @@ -45,6 +45,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. 
-To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/Projects/Projects/Machine-Learning-on-AWS-Graviton.md b/Projects/Projects/Machine-Learning-on-AWS-Graviton.md index 21389beb..97b1627c 100644 --- a/Projects/Projects/Machine-Learning-on-AWS-Graviton.md +++ b/Projects/Projects/Machine-Learning-on-AWS-Graviton.md @@ -60,7 +60,7 @@ The aim of this project is to port, benchmark, and optimize a pre-trained ViT mo This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register). ## Benefits +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/Projects/Projects/NPC-LLM-Runtime.md b/Projects/Projects/NPC-LLM-Runtime.md index 707dc588..1f75a06d 100644 --- a/Projects/Projects/NPC-LLM-Runtime.md +++ b/Projects/Projects/NPC-LLM-Runtime.md @@ -19,6 +19,7 @@ publication-date: 2025-08-28 license: status: - "Published" +badges: trending donation: --- @@ -65,6 +66,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/Projects/Projects/Processor-in-the-Loop-Automotive.md b/Projects/Projects/Processor-in-the-Loop-Automotive.md index e42fecc7..a6bee43b 100644 --- a/Projects/Projects/Processor-in-the-Loop-Automotive.md +++ b/Projects/Projects/Processor-in-the-Loop-Automotive.md @@ -67,6 +67,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. + diff --git a/Projects/Projects/Python-Porting-Challenge.md b/Projects/Projects/Python-Porting-Challenge.md index 49572f8d..47a3ab1c 100644 --- a/Projects/Projects/Python-Porting-Challenge.md +++ b/Projects/Projects/Python-Porting-Challenge.md @@ -63,6 +63,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. 
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/Projects/Projects/Quantisation-Aware-Training.md b/Projects/Projects/Quantisation-Aware-Training.md index 8f3eb952..7111a631 100644 --- a/Projects/Projects/Quantisation-Aware-Training.md +++ b/Projects/Projects/Quantisation-Aware-Training.md @@ -59,6 +59,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
+ diff --git a/Projects/Projects/R-Arm-Community-Support.md b/Projects/Projects/R-Arm-Community-Support.md index baf9dfe8..d38f8368 100644 --- a/Projects/Projects/R-Arm-Community-Support.md +++ b/Projects/Projects/R-Arm-Community-Support.md @@ -74,6 +74,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/Projects/Projects/Real-Time-Image-Classification.md b/Projects/Projects/Real-Time-Image-Classification.md index 83ccc6c0..01c5c857 100644 --- a/Projects/Projects/Real-Time-Image-Classification.md +++ b/Projects/Projects/Real-Time-Image-Classification.md @@ -55,6 +55,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/Projects/Projects/Responsible-AI-and-Yellow-Teaming.md b/Projects/Projects/Responsible-AI-and-Yellow-Teaming.md index 79cb96c9..ece793bd 100644 --- a/Projects/Projects/Responsible-AI-and-Yellow-Teaming.md +++ b/Projects/Projects/Responsible-AI-and-Yellow-Teaming.md @@ -77,6 +77,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). 
Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/Projects/Projects/Sentiment-Analysis-Dashboard.md b/Projects/Projects/Sentiment-Analysis-Dashboard.md index 00a10050..4892f99e 100644 --- a/Projects/Projects/Sentiment-Analysis-Dashboard.md +++ b/Projects/Projects/Sentiment-Analysis-Dashboard.md @@ -51,6 +51,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. 
Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/Projects/Projects/Smart-Voice-Assistant.md b/Projects/Projects/Smart-Voice-Assistant.md index d2ca6e57..b2bb3fc9 100644 --- a/Projects/Projects/Smart-Voice-Assistant.md +++ b/Projects/Projects/Smart-Voice-Assistant.md @@ -51,6 +51,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. 
diff --git a/Projects/Projects/SpecINT2017-benchmarking-on-Arm64.md b/Projects/Projects/SpecINT2017-benchmarking-on-Arm64.md index 64f88267..ed9d9d70 100644 --- a/Projects/Projects/SpecINT2017-benchmarking-on-Arm64.md +++ b/Projects/Projects/SpecINT2017-benchmarking-on-Arm64.md @@ -70,6 +70,7 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file + +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/Projects/Projects/Write-A-Learning-Path.md b/Projects/Projects/Write-A-Learning-Path.md index b531d347..44e11ae6 100644 --- a/Projects/Projects/Write-A-Learning-Path.md +++ b/Projects/Projects/Write-A-Learning-Path.md @@ -48,6 +48,6 @@ This project is designed to be self-serve but comes with opportunity of some com ## Benefits -Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). 
And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us. +Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition. -To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. \ No newline at end of file +To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material. diff --git a/docs/_config.yml b/docs/_config.yml index a050587f..cbd2b311 100644 --- a/docs/_config.yml +++ b/docs/_config.yml @@ -18,8 +18,8 @@ ############################## text_skin: default # "default" (default), "dark", "forest", "ocean", "chocolate", "orange" highlight_theme: default # "default" (default), "tomorrow", "tomorrow-night", "tomorrow-night-eighties", "tomorrow-night-blue", "tomorrow-night-bright" -url : # the base hostname & protocol for your site e.g. 
https://www.someone.com -baseurl : # does not include hostname +url: +baseurl: /Arm-Developer-Labs title : description: > # this means to ignore newlines until "Language & timezone" Developer Labs @@ -223,3 +223,7 @@ plugins: - jekyll-paginate - jekyll-sitemap - jemoji + + + + diff --git a/docs/_data/badges.yml b/docs/_data/badges.yml new file mode 100644 index 00000000..d8798ef7 --- /dev/null +++ b/docs/_data/badges.yml @@ -0,0 +1,13 @@ +new: + file: new.svg + alt: "New" +trending: + file: Trending.svg + alt: "Trending" +updated: + file: updated.svg + alt: "Recently updated" +recently_added: + file: RA.svg + alt: "Recently Added" + diff --git a/docs/_data/navigation.yml b/docs/_data/navigation.yml index 075c7b67..1e27216c 100644 --- a/docs/_data/navigation.yml +++ b/docs/_data/navigation.yml @@ -32,7 +32,7 @@ projects: \ laptops and desktops." url: /2025/05/30/R-Arm-Community-Support.html subjects: - - Performance and Architecture + - ML - Migration to Arm - Libraries platform: @@ -52,9 +52,9 @@ projects: url: /2025/05/30/AI-Agents.html subjects: - ML + - Performance and Architecture platform: - Servers and Cloud Computing - - Laptops and Desktops - AI sw-hw: - Software @@ -63,14 +63,13 @@ projects: - Arm Ambassador Support status: - Published - - title: Haskell-Compiler-Windows-on-Arm - description: "This self-service project brings native Glasgow Haskell Compiler\ - \ support to Windows on Arm\u2014unlocking efficient Arm-laptop builds, extending\ - \ Haskell\u2019s reach, and giving contributors hands-on experience with Arm64\ - \ code generation and runtime integration." - url: /2025/05/30/Haskell-Compiler-Windows-on-Arm.html + - title: R-Arm-Community-Support + description: "This self-service project boosts the R ecosystem on Windows on Arm\ + \ by identifying unsupported packages, upstreaming fixes, and automating builds\u2014\ + so data scientists can run their workflows natively on fast, efficient Arm64\ + \ laptops and desktops." 
+ url: /2025/05/30/R-Arm-Community-Support.html subjects: - - Migration to Arm - Performance and Architecture platform: - Servers and Cloud Computing @@ -149,11 +148,9 @@ projects: - CI-CD - ML - Migration to Arm + - Libraries platform: - - Servers and Cloud Computing - Laptops and Desktops - - Mobile, Graphics, and Gaming - - AI sw-hw: - Software support-level: @@ -193,8 +190,21 @@ projects: - Servers and Cloud Computing - Laptops and Desktops - AI + - title: Haskell-Compiler-Windows-on-Arm + description: "This self-service project brings native Glasgow Haskell Compiler\ + \ support to Windows on Arm\u2014unlocking efficient Arm-laptop builds, extending\ + \ Haskell\u2019s reach, and giving contributors hands-on experience with Arm64\ + \ code generation and runtime integration." + url: /2025/05/30/Haskell-Compiler-Windows-on-Arm.html + subjects: + - Migration to Arm + - Performance and Architecture + platform: + - Servers and Cloud Computing + - Laptops and Desktops sw-hw: - Software + - Hardware support-level: - Self-Service - Arm Ambassador Support @@ -213,9 +223,18 @@ projects: - Servers and Cloud Computing - Laptops and Desktops - AI + - title: C-Based-Application-from-Scratch + description: This self-service project goes back to the fundamentals. The challenge + is to develop an application of your choice but your are only permitted to use + the C language with as few dependencies as possible. 
+ url: /2025/07/11/C-Based-Application-from-Scratch.html + subjects: + - Performance and Architecture + - Libraries + platform: + - IoT sw-hw: - Software - - Hardware support-level: - Self-Service - Arm Ambassador Support @@ -253,6 +272,22 @@ projects: platform: - Servers and Cloud Computing - Laptops and Desktops + - title: AI-Powered-Porting-Tool + description: "This self-service project creates an AI-driven porting engine that\ + \ analyzes package dependencies, auto-generates fixes, and submits pull requests\u2014\ + accelerating native macOS and Windows-on-Arm support for bioinformatics and\ + \ R software so researchers can run demanding workflows directly on modern Arm\ + \ devices." + url: /2025/05/30/AI-Powered-Porting-Tool.html + subjects: + - CI-CD + - ML + - Migration to Arm + platform: + - Servers and Cloud Computing + - Laptops and Desktops + - Mobile, Graphics, and Gaming + - AI sw-hw: - Software support-level: @@ -267,10 +302,21 @@ projects: url: /2025/11/27/Ethos-U85-NPU-Applications.html subjects: - ML + - title: Architecture-Insight-Dashboard + description: "This self-service project develops a data-rich dashboard that visualizes\ + \ the popularity of Arm CPU/OS combinations and pinpoints software-stack support\ + \ for specific extensions\u2014giving developers an instant, validated view\ + \ of where their workloads will run best." 
+ url: /2025/05/30/Architecture-Insight-Dashboard.html + subjects: - Performance and Architecture + - Web platform: - IoT - Embedded and Microcontrollers + - Servers and Cloud Computing + - Laptops and Desktops + - Mobile, Graphics, and Gaming - AI sw-hw: - Software @@ -300,19 +346,16 @@ projects: - Arm Ambassador Support status: - Published - - title: Academic-Trends-Dashboard - description: "This self-service project creates a web-scraping, database-driven\ - \ dashboard that visualizes how computer-science research topics shift over\ - \ time\u2014helping Arm partners and chip architects align future hardware designs\ - \ with emerging algorithmic trends." - url: /2025/05/30/Academic-Trends-Dashboard.html + - title: Responsible-AI-and-Yellow-Teaming + description: "This self-service project equips teams with a YellowTeamGPT workflow\ + \ that probes Arm-based AI products for unintended impacts\u2014turning responsible-AI\ + \ stress-testing into a core step of the development cycle." + url: /2025/05/30/Responsible-AI-and-Yellow-Teaming.html subjects: - - Web - - Databases + - ML platform: - Servers and Cloud Computing - Laptops and Desktops - - Mobile, Graphics, and Gaming - AI sw-hw: - Software @@ -321,6 +364,27 @@ projects: - Arm Ambassador Support status: - Published + - title: SpecINT2017-benchmarking-on-Arm64 + description: "This self-service project profiles SPEC CPU2017 on Arm64 servers\u2014\ + using GCC, Clang, and Arm Compiler with top-down analysis\u2014to reveal how\ + \ compiler choices and Arm micro-architectural features impact execution time,\ + \ energy efficiency, and performance bottlenecks." 
+ url: /2025/05/30/SpecINT2017-benchmarking-on-Arm64.html + subjects: + - Performance and Architecture + - Migration to Arm + platform: + - Servers and Cloud Computing + - Laptops and Desktops + - AI + sw-hw: + - Software + - Hardware + support-level: + - Self-Service + - Arm Ambassador Support + status: + - Published - title: Edge-AI-On-Mobile description: Leverage the latest SME2 (Scalable Matrix Extension 2) available on the newest vivo X300 smartphones (built on Arm Lumex CSS) for advanced image/video, @@ -369,6 +433,12 @@ projects: it wakes up a more capable processor to carry out high-value inference, interaction, or control tasks. url: /2025/11/27/Always-On-AI-with-Ethos-U85-NPU.html + - title: Arduino-IDE-Windows-on-Arm + description: "This self-service project ports and optimizes the Arduino IDE\u2014\ + patching its lzma-native dependency\u2014to run natively and efficiently on\ + \ Windows on Arm, giving developers hands-on experience with cross-platform\ + \ builds, Arm64 performance tuning, and upstream open-source contributions." + url: /2025/05/30/Arduino-IDE-Windows-on-Arm.html subjects: - ML - Performance and Architecture @@ -377,6 +447,31 @@ projects: platform: - IoT - Embedded and Microcontrollers + - Migration to Arm + - Libraries + platform: + - Laptops and Desktops + sw-hw: + - Software + - Hardware + support-level: + - Self-Service + - Arm Ambassador Support + status: + - Published + - title: Academic-Trends-Dashboard + description: "This self-service project creates a web-scraping, database-driven\ + \ dashboard that visualizes how computer-science research topics shift over\ + \ time\u2014helping Arm partners and chip architects align future hardware designs\ + \ with emerging algorithmic trends." 
+ url: /2025/05/30/Academic-Trends-Dashboard.html + subjects: + - Web + - Databases + platform: + - Servers and Cloud Computing + - Laptops and Desktops + - Mobile, Graphics, and Gaming - AI sw-hw: - Software @@ -399,6 +494,16 @@ projects: - Graphics platform: - Mobile, Graphics, and Gaming + - title: HPC-Algorithm + description: "This self-service project is around finding a HPC algorithm and\ + \ accelerating it with Arm\u2019s SVE/SVE2 vectorization\u2014demonstrating\ + \ how next-generation Arm hardware can deliver significant, scalable performance\ + \ gains." + url: /2025/05/30/HPC-Algorithm.html + subjects: + - Performance and Architecture + platform: + - Servers and Cloud Computing - Laptops and Desktops - AI sw-hw: diff --git a/docs/_includes/article-list.html b/docs/_includes/article-list.html index f66cb3d1..06de9513 100644 --- a/docs/_includes/article-list.html +++ b/docs/_includes/article-list.html @@ -108,33 +108,44 @@ {%- elsif include.type == 'grid' -%} + {%- assign _badges = _article.badges | default: _article.data.badges -%} {%- if include.size == 'sm' -%}
- {{ _child.description }} -
+ data-support-level='{{ _child["support-level"] | jsonify }}' + {%- if _badges -%} data-badges='{{ _badges | jsonify }}'{%- endif -%} + data-url-rel='{{ _nav_url_rel }}'> + ++ {{ _child.description }} +
-
- ## Audience
- Electronic Engineering
-
- ## Description
- This project aims to develop a reference design of AMBA (Advanced Microcontroller Bus Architecture) infrastructure. You are free to choose which AMBA protocol and version of the interconnect standard to implement with more simple specifications e.g., APB, being easier to create than coherent protocols such as AMBA CHI.
-
- The main deliverables include the Verilog design of the AMBA infrastructure, a Verilog test bench for testing the design, an RTL (Register Transfer Level) simulation flow to verify the functionality, and an FPGA prototyping platform to demonstrate the design in a real-world environment. The project will provide a comprehensive understanding of AMBA protocols and their implementation, making it an excellent learning opportunity for students interested in digital design and hardware description languages.
-
- ## Prequisites
-
- - Intermediate understanding of Verilog, SystemVerilog or other hardware description languages (HDL).
- - Access and basic understanding of ModelSim, Quartus and Vivado
- - Access to a suitable FPGA development board (e.g., Xilinx or Altera), simulation tools
-
- ## Resources from Arm and our partners
-
-
- - Video: [Introductory Video to AMBA](https://www.youtube.com/watch?v=zayyWwSxyW4)
- - Documentation: [AMBA Interconnect Specifications](https://www.arm.com/architecture/system-architectures/amba/amba-specifications)
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Previous Submissions
-
- Similar projects:
- - https://github.com/kumarraj5364/AMBA-APB-PROTOCOL
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-## Audience
-Electronic Engineering
-
-## Description
-This project aims to develop a reference design of AMBA (Advanced Microcontroller Bus Architecture) infrastructure. You are free to choose which AMBA protocol and version of the interconnect standard to implement with more simple specifications e.g., APB, being easier to create than coherent protocols such as AMBA CHI.
-
-The main deliverables include the Verilog design of the AMBA infrastructure, a Verilog test bench for testing the design, an RTL (Register Transfer Level) simulation flow to verify the functionality, and an FPGA prototyping platform to demonstrate the design in a real-world environment. The project will provide a comprehensive understanding of AMBA protocols and their implementation, making it an excellent learning opportunity for students interested in digital design and hardware description languages.
-
-## Prequisites
-
-- Intermediate understanding of Verilog, SystemVerilog or other hardware description languages (HDL).
-- Access and basic understanding of ModelSim, Quartus and Vivado
-- Access to a suitable FPGA development board (e.g., Xilinx or Altera), simulation tools
-
-## Resources from Arm and our partners
-
-
-- Video: [Introductory Video to AMBA](https://www.youtube.com/watch?v=zayyWwSxyW4)
-- Documentation: [AMBA Interconnect Specifications](https://www.arm.com/architecture/system-architectures/amba/amba-specifications)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Previous Submissions
-
-Similar projects:
- - https://github.com/kumarraj5364/AMBA-APB-PROTOCOL
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: AMBA-Simulator-Framework
+description: This self-guided hardware project has you implement, simulate, and FPGA-prototype a Verilog AMBA bus—from simple APB to advanced CHI—sharpening hands-on expertise with Arm’s interconnect backbone and yielding a reusable reference design for future embedded systems.
+subjects:
+- Virtual Hardware
+- Performance and Architecture
+requires-team:
+- No
+platform:
+- Embedded and Microcontrollers
+sw-hw:
+- Hardware
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Hidden
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+ ## Audience
+ Electronic Engineering
+
+ ## Description
+ This project aims to develop a reference design of AMBA (Advanced Microcontroller Bus Architecture) infrastructure. You are free to choose which AMBA protocol and version of the interconnect standard to implement with more simple specifications e.g., APB, being easier to create than coherent protocols such as AMBA CHI.
+
+ The main deliverables include the Verilog design of the AMBA infrastructure, a Verilog test bench for testing the design, an RTL (Register Transfer Level) simulation flow to verify the functionality, and an FPGA prototyping platform to demonstrate the design in a real-world environment. The project will provide a comprehensive understanding of AMBA protocols and their implementation, making it an excellent learning opportunity for students interested in digital design and hardware description languages.
+
+ ## Prerequisites
+
+ - Intermediate understanding of Verilog, SystemVerilog or other hardware description languages (HDL).
+ - Access and basic understanding of ModelSim, Quartus and Vivado
+ - Access to a suitable FPGA development board (e.g., Xilinx or Altera), simulation tools
+
+ ## Resources from Arm and our partners
+
+
+ - Video: [Introductory Video to AMBA](https://www.youtube.com/watch?v=zayyWwSxyW4)
+ - Documentation: [AMBA Interconnect Specifications](https://www.arm.com/architecture/system-architectures/amba/amba-specifications)
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Previous Submissions
+
+ Similar projects:
+ - https://github.com/kumarraj5364/AMBA-APB-PROTOCOL
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+## Audience
+Electronic Engineering
+
+## Description
+This project aims to develop a reference design of AMBA (Advanced Microcontroller Bus Architecture) infrastructure. You are free to choose which AMBA protocol and version of the interconnect standard to implement with more simple specifications e.g., APB, being easier to create than coherent protocols such as AMBA CHI.
+
+The main deliverables include the Verilog design of the AMBA infrastructure, a Verilog test bench for testing the design, an RTL (Register Transfer Level) simulation flow to verify the functionality, and an FPGA prototyping platform to demonstrate the design in a real-world environment. The project will provide a comprehensive understanding of AMBA protocols and their implementation, making it an excellent learning opportunity for students interested in digital design and hardware description languages.
+
+## Prerequisites
+
+- Intermediate understanding of Verilog, SystemVerilog or other hardware description languages (HDL).
+- Access and basic understanding of ModelSim, Quartus and Vivado
+- Access to a suitable FPGA development board (e.g., Xilinx or Altera), simulation tools
+
+## Resources from Arm and our partners
+
+
+- Video: [Introductory Video to AMBA](https://www.youtube.com/watch?v=zayyWwSxyW4)
+- Documentation: [AMBA Interconnect Specifications](https://www.arm.com/architecture/system-architectures/amba/amba-specifications)
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Previous Submissions
+
+Similar projects:
+ - https://github.com/kumarraj5364/AMBA-APB-PROTOCOL
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md b/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md
index 220c18f8..59ebb6ee 100644
--- a/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md
+++ b/docs/_posts/2025-05-30-Academic-Trends-Dashboard.md
@@ -1,98 +1,99 @@
----
-title: Design a Dashboard that Tracks the Progression of Academic Papers on Computer Science Over Time
-description: This self-service project creates a web-scraping, database-driven dashboard that visualizes how computer-science research topics shift over time—helping Arm partners and chip architects align future hardware designs with emerging algorithmic trends.
-subjects:
-- Web
-- Databases
-requires-team:
-- No
-platform:
-- Servers and Cloud Computing
-- Laptops and Desktops
-- Mobile, Graphics, and Gaming
-- AI
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
-
-
- ## Description
-
- **Why this is important?**
-
- The field of computer science research is continually evolving, with new algorithms shaping the design of future hardware. This project aims to guide computer architecture decisions, ensuring that upcoming hardware aligns with the needs of software algorithms and applications. The dashboard you create can be of use to Arm partners manufacturing physical chips and used to guide architecture decisions.
-
- **Project Summary**
-
- The main deliverable is a web scraping tool that pulls keywords from academic papers, considering the popularity of the paper and its publication. The data will be stored in an appropriate database and displayed in a web browser format, allowing users to visualize trends and changes in research focus over time. This project will provide practical experience in using APIs, web scraping, and data analysis. Some academic search engines to consider are Google Scholar, [BASE](https://www.base-search.net/), [Core](https://core.ac.uk/) and [Science.gov](https://science.gov/).
-
-
- ## Prequisites
-
- - Software: Intermediate understand of a scripting programming language (e.g., Python, JavaScript), web development and statistics.
- - Hardware: Access to a computer with internet connectivity
- - API access to scrape specific journal websites, you may need to obtain explicit permission from the website administrators or owners.
-
- ## Resources from Arm and our partners
-
- - Learning path: [Deploy MariaDB on Arm servers](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/))
- - Learning path: [Learn how to deploy PostgresSQL](https://learn.arm.com/learning-paths/servers-and-cloud-computing/postgresql/)
- - Software Libraries: Example libraries for web scraping are [BeautifulSoup](https://pypi.org/project/beautifulsoup4/), Selenium.
-
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-## Description
-
-**Why this is important?**
-
-The field of computer science research is continually evolving, with new algorithms shaping the design of future hardware. This project aims to guide computer architecture decisions, ensuring that upcoming hardware aligns with the needs of software algorithms and applications. The dashboard you create can be of use to Arm partners manufacturing physical chips and used to guide architecture decisions.
-
-**Project Summary**
-
-The main deliverable is a web scraping tool that pulls keywords from academic papers, considering the popularity of the paper and its publication. The data will be stored in an appropriate database and displayed in a web browser format, allowing users to visualize trends and changes in research focus over time. This project will provide practical experience in using APIs, web scraping, and data analysis. Some academic search engines to consider are Google Scholar, [BASE](https://www.base-search.net/), [Core](https://core.ac.uk/) and [Science.gov](https://science.gov/).
-
-
-## Prequisites
-
-- Software: Intermediate understand of a scripting programming language (e.g., Python, JavaScript), web development and statistics.
-- Hardware: Access to a computer with internet connectivity
-- API access to scrape specific journal websites, you may need to obtain explicit permission from the website administrators or owners.
-
-## Resources from Arm and our partners
-
-- Learning path: [Deploy MariaDB on Arm servers](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/))
-- Learning path: [Learn how to deploy PostgresSQL](https://learn.arm.com/learning-paths/servers-and-cloud-computing/postgresql/)
-- Software Libraries: Example libraries for web scraping are [BeautifulSoup](https://pypi.org/project/beautifulsoup4/), Selenium.
-
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Academic-Trends-Dashboard
+description: This self-service project creates a web-scraping, database-driven dashboard that visualizes how computer-science research topics shift over time—helping Arm partners and chip architects align future hardware designs with emerging algorithmic trends.
+subjects:
+- Web
+- Databases
+requires-team:
+- No
+platform:
+- Servers and Cloud Computing
+- Laptops and Desktops
+- Mobile, Graphics, and Gaming
+- AI
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+ ## Description
+
+ **Why is this important?**
+
+ The field of computer science research is continually evolving, with new algorithms shaping the design of future hardware. This project aims to guide computer architecture decisions, ensuring that upcoming hardware aligns with the needs of software algorithms and applications. The dashboard you create can be of use to Arm partners manufacturing physical chips and used to guide architecture decisions.
+
+ **Project Summary**
+
+ The main deliverable is a web scraping tool that pulls keywords from academic papers, considering the popularity of the paper and its publication. The data will be stored in an appropriate database and displayed in a web browser format, allowing users to visualize trends and changes in research focus over time. This project will provide practical experience in using APIs, web scraping, and data analysis. Some academic search engines to consider are Google Scholar, [BASE](https://www.base-search.net/), [Core](https://core.ac.uk/) and [Science.gov](https://science.gov/).
+
+
+ ## Prerequisites
+
+ - Software: Intermediate understanding of a scripting programming language (e.g., Python, JavaScript), web development and statistics.
+ - Hardware: Access to a computer with internet connectivity
+ - API access to scrape specific journal websites; you may need to obtain explicit permission from the website administrators or owners.
+
+ ## Resources from Arm and our partners
+
+ - Learning path: [Deploy MariaDB on Arm servers](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/)
+ - Learning path: [Learn how to deploy PostgresSQL](https://learn.arm.com/learning-paths/servers-and-cloud-computing/postgresql/)
+ - Software Libraries: Example libraries for web scraping are [BeautifulSoup](https://pypi.org/project/beautifulsoup4/), Selenium.
+
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+## Description
+
+**Why is this important?**
+
+The field of computer science research is continually evolving, with new algorithms shaping the design of future hardware. This project aims to guide computer architecture decisions, ensuring that upcoming hardware aligns with the needs of software algorithms and applications. The dashboard you create can be of use to Arm partners manufacturing physical chips and used to guide architecture decisions.
+
+**Project Summary**
+
+The main deliverable is a web scraping tool that pulls keywords from academic papers, considering the popularity of the paper and its publication. The data will be stored in an appropriate database and displayed in a web browser format, allowing users to visualize trends and changes in research focus over time. This project will provide practical experience in using APIs, web scraping, and data analysis. Some academic search engines to consider are Google Scholar, [BASE](https://www.base-search.net/), [Core](https://core.ac.uk/) and [Science.gov](https://science.gov/).
+
+
+## Prerequisites
+
+- Software: Intermediate understanding of a scripting programming language (e.g., Python, JavaScript), web development and statistics.
+- Hardware: Access to a computer with internet connectivity
+- API access to scrape specific journal websites; you may need to obtain explicit permission from the website administrators or owners.
+
+## Resources from Arm and our partners
+
+- Learning path: [Deploy MariaDB on Arm servers](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/)
+- Learning path: [Learn how to deploy PostgresSQL](https://learn.arm.com/learning-paths/servers-and-cloud-computing/postgresql/)
+- Software Libraries: Example libraries for web scraping are [BeautifulSoup](https://pypi.org/project/beautifulsoup4/), Selenium.
+
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md b/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md
index 048d8d8c..1c74fee7 100644
--- a/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md
+++ b/docs/_posts/2025-05-30-Architecture-Insight-Dashboard.md
@@ -1,114 +1,117 @@
----
-title: Develop an Arm Architecture Insight Dashboard
-description: This self-service project develops a data-rich dashboard that visualizes the popularity of Arm CPU/OS combinations and pinpoints software-stack support for specific extensions—giving developers an instant, validated view of where their workloads will run best.
-subjects:
-- Performance and Architecture
-- Web
-requires-team:
-- No
-platform:
-- Servers and Cloud Computing
-- Laptops and Desktops
-- Mobile, Graphics, and Gaming
-- AI
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
-
-
-
- ### Description
-
- **Why this is important?**
-
- Developers often face challenges in selecting the appropriate platform for their software. With numerous smartphones and cloud instances available, gauging consumer popularity and availability can be difficult, and identifying software stack dependencies can be time-consuming. As Arm anticipates an increase in Arm-based products in the coming years, this situation is likely to become even more complex, requiring the need for a single, validated solution.
-
- **Project Summary**
-
- This project aims to develop a comprehensive dashboard that lets a developer know what proportion of devices support a specific Arm CPU extension, similar to [“Can I use”](https://caniuse.com/) for web development and any software compatibility issues. The functional requirements for the Architecture Insights dashboard:
-
- - Popularity of Arm architectures and Operating System combinations over time
- - Searchable index of software, libraries and tools that have been optimised for a specific architecture. For example, "Does the video processing software, FFMPEG, support acceleration for SVE2 with Windows 11?"
-
-
- Students will gain hands-on experience with data visualization, statistical analysis, web development, and market analysis, providing valuable insights into the Arm ecosystem.
-
- ## Prequisites
-
- You are free to explore your own implementation. The skills below are examples.
-
- - Intemediate understanding of an OOP language such as Python or JavaScript
- - Access to a computer with internet connectivity
-
-
- ## Resources from Arm and our partners
-
- - Website: [Arm Software Ecosystem Dashboard](https://www.arm.com/developer-hub/ecosystem-dashboard)
- - Website: [Windows on Arm Support Wiki page](https://linaro.atlassian.net/wiki/spaces/WOAR/overview)
- - Website: ["Can I Use?" dashboard](https://caniuse.com/)
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-
-### Description
-
-**Why this is important?**
-
-Developers often face challenges in selecting the appropriate platform for their software. With numerous smartphones and cloud instances available, gauging consumer popularity and availability can be difficult, and identifying software stack dependencies can be time-consuming. As Arm anticipates an increase in Arm-based products in the coming years, this situation is likely to become even more complex, requiring the need for a single, validated solution.
-
-**Project Summary**
-
-This project aims to develop a comprehensive dashboard that lets a developer know what proportion of devices support a specific Arm CPU extension, similar to [“Can I use”](https://caniuse.com/) for web development and any software compatibility issues. The functional requirements for the Architecture Insights dashboard:
-
-- Popularity of Arm architectures and Operating System combinations over time
-- Searchable index of software, libraries and tools that have been optimised for a specific architecture. For example, "Does the video processing software, FFMPEG, support acceleration for SVE2 with Windows 11?"
-
-
-Students will gain hands-on experience with data visualization, statistical analysis, web development, and market analysis, providing valuable insights into the Arm ecosystem.
-
-## Prequisites
-
-You are free to explore your own implementation. The skills below are examples.
-
-- Intemediate understanding of an OOP language such as Python or JavaScript
-- Access to a computer with internet connectivity
-
-
-## Resources from Arm and our partners
-
-- Website: [Arm Software Ecosystem Dashboard](https://www.arm.com/developer-hub/ecosystem-dashboard)
-- Website: [Windows on Arm Support Wiki page](https://linaro.atlassian.net/wiki/spaces/WOAR/overview)
-- Website: ["Can I Use?" dashboard](https://caniuse.com/)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Architecture-Insight-Dashboard
+description: This self-service project develops a data-rich dashboard that visualizes the popularity of Arm CPU/OS combinations and pinpoints software-stack support for specific extensions—giving developers an instant, validated view of where their workloads will run best.
+subjects:
+- Performance and Architecture
+- Web
+requires-team:
+- No
+platform:
+- Servers and Cloud Computing
+- Laptops and Desktops
+- Mobile, Graphics, and Gaming
+- AI
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+
+ ### Description
+
+ **Why is this important?**
+
+ Developers often face challenges in selecting the appropriate platform for their software. With numerous smartphones and cloud instances available, gauging consumer popularity and availability can be difficult, and identifying software stack dependencies can be time-consuming. As Arm anticipates an increase in Arm-based products in the coming years, this situation is likely to become even more complex, requiring the need for a single, validated solution.
+
+ **Project Summary**
+
+ This project aims to develop a comprehensive dashboard that lets a developer know what proportion of devices support a specific Arm CPU extension, similar to [“Can I use”](https://caniuse.com/) for web development and any software compatibility issues. The functional requirements for the Architecture Insights dashboard:
+
+ - Popularity of Arm architectures and Operating System combinations over time
+ - Searchable index of software, libraries and tools that have been optimised for a specific architecture. For example, "Does the video processing software, FFMPEG, support acceleration for SVE2 with Windows 11?"
+
+
+ Students will gain hands-on experience with data visualization, statistical analysis, web development, and market analysis, providing valuable insights into the Arm ecosystem.
+
+ ## Prerequisites
+
+ You are free to explore your own implementation. The skills below are examples.
+
+ - Intermediate understanding of an OOP language such as Python or JavaScript
+ - Access to a computer with internet connectivity
+
+
+ ## Resources from Arm and our partners
+
+ - Website: [Arm Software Ecosystem Dashboard](https://www.arm.com/developer-hub/ecosystem-dashboard)
+ - Website: [Windows on Arm Support Wiki page](https://linaro.atlassian.net/wiki/spaces/WOAR/overview)
+ - Website: ["Can I Use?" dashboard](https://caniuse.com/)
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+
+### Description
+
+**Why is this important?**
+
+Developers often face challenges in selecting the appropriate platform for their software. With numerous smartphones and cloud instances available, gauging consumer popularity and availability can be difficult, and identifying software stack dependencies can be time-consuming. As Arm anticipates an increase in Arm-based products in the coming years, this situation is likely to become even more complex, requiring the need for a single, validated solution.
+
+**Project Summary**
+
+This project aims to develop a comprehensive dashboard that lets a developer know what proportion of devices support a specific Arm CPU extension, similar to [“Can I use”](https://caniuse.com/) for web development and any software compatibility issues. The functional requirements for the Architecture Insights dashboard:
+
+- Popularity of Arm architectures and Operating System combinations over time
+- Searchable index of software, libraries and tools that have been optimised for a specific architecture. For example, "Does the video processing software, FFMPEG, support acceleration for SVE2 with Windows 11?"
+
+
+Students will gain hands-on experience with data visualization, statistical analysis, web development, and market analysis, providing valuable insights into the Arm ecosystem.
+
+## Prerequisites
+
+You are free to explore your own implementation. The skills below are examples.
+
+- Intermediate understanding of an OOP language such as Python or JavaScript
+- Access to a computer with internet connectivity
+
+
+## Resources from Arm and our partners
+
+- Website: [Arm Software Ecosystem Dashboard](https://www.arm.com/developer-hub/ecosystem-dashboard)
+- Website: [Windows on Arm Support Wiki page](https://linaro.atlassian.net/wiki/spaces/WOAR/overview)
+- Website: ["Can I Use?" dashboard](https://caniuse.com/)
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md b/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md
index 335d5770..029ce29e 100644
--- a/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md
+++ b/docs/_posts/2025-05-30-Arduino-IDE-Windows-on-Arm.md
@@ -1,123 +1,126 @@
----
-title: Porting and Optimizing Arduino IDE for Windows on Arm
-description: This self-service project ports and optimizes the Arduino IDE—patching its lzma-native dependency—to run natively and efficiently on Windows on Arm, giving developers hands-on experience with cross-platform builds, Arm64 performance tuning, and upstream open-source contributions.
-subjects:
-- Performance and Architecture
-- Migration to Arm
-- Libraries
-requires-team:
-- No
-platform:
-- Laptops and Desktops
-sw-hw:
-- Software
-- Hardware
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
-
-
-
- ## Description
-
- **Why this is important?**
-
- Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, consumers will expect both performance and battery efficiency across all WoA applications, such as the ArduinoIDE.
-
- **Project summary**
-
- This project focuses on **porting and optimising the Arduino IDE**—an essential open-source platform for embedded development to run natively and efficiently on Windows on Arm platforms. In addition, the project tackles a key dependency, `lzma-native`, a compression library used by the IDE, which currently [**lacks support for Windows on Arm**](https://github.com/addaleax/lzma-native/issues/132) Previous attempts to build `lzma-native` on WoA failed due to architecture-specific compilation issues and native module bindings (`node-gyp`, `liblzma`, etc.).
-
- ### Key Objectives:
- - Successfully build and run the [Arduino IDE](https://github.com/arduino/arduino-ide) on Windows on Arm.
- - Patch or fork [`lzma-native`](https://github.com/addaleax/lzma-native) to enable full compatibility on WoA.
- - Benchmark IDE performance and memory usage on Arm64 vs. x64 emulation.
- - Submit upstream patches and document issues to support long-term ecosystem health.
-
- This project aligns strongly with Arm’s mission to expand native software compatibility on Arm-based Windows devices. It provides students with a **deep dive into cross-platform development, native module compilation, and Arm architecture optimization**, making it ideal for CV building, community contribution, and real-world system-level experience.
-
- ## Prequisites
-
-
- - Familiarity with JavaScript (Node.js), TypeScript and C++ (lzma-native)
- - Familiarity or willing to learn `CMake`, `Ninja`, `Visual Studio with C++ Desktop Dev`, UTM
- - Basic understandig of terminal programs such as `Windows Terminal`, `PowerShell` and `WSL2`
- - Access to a physical Windows on Arm device or a [WoA Virtual Machine running through UTM](https://mac.getutm.app/gallery/windows-11-arm). see the [Linaro Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) for more information.
-
-
- ## Resources from Arm and our partners
-
- - Repository: [Arduino IDE GitHub repo](https://github.com/arduino/arduino-ide)
- - Repository: [lzma-native GitHub repo](https://github.com/addaleax/lzma-native)
- - External Documentation: [Issue #132 – lzma-native Windows Arm64 build failure](https://github.com/addaleax/lzma-native/issues/132)
- - Documentation: Arm’s official [Learn on Arm](https://learn.arm.com/) platform
- - External Documentation: [Windows on Arm Environments – Linaro wiki](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
- - Documentation: [Node.js native addon guides](https://nodejs.org/api/addons.html)
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-
-## Description
-
-**Why this is important?**
-
-Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, consumers will expect both performance and battery efficiency across all WoA applications, such as the ArduinoIDE.
-
-**Project summary**
-
-This project focuses on **porting and optimising the Arduino IDE**—an essential open-source platform for embedded development to run natively and efficiently on Windows on Arm platforms. In addition, the project tackles a key dependency, `lzma-native`, a compression library used by the IDE, which currently [**lacks support for Windows on Arm**](https://github.com/addaleax/lzma-native/issues/132) Previous attempts to build `lzma-native` on WoA failed due to architecture-specific compilation issues and native module bindings (`node-gyp`, `liblzma`, etc.).
-
-### Key Objectives:
-- Successfully build and run the [Arduino IDE](https://github.com/arduino/arduino-ide) on Windows on Arm.
-- Patch or fork [`lzma-native`](https://github.com/addaleax/lzma-native) to enable full compatibility on WoA.
-- Benchmark IDE performance and memory usage on Arm64 vs. x64 emulation.
-- Submit upstream patches and document issues to support long-term ecosystem health.
-
-This project aligns strongly with Arm’s mission to expand native software compatibility on Arm-based Windows devices. It provides students with a **deep dive into cross-platform development, native module compilation, and Arm architecture optimization**, making it ideal for CV building, community contribution, and real-world system-level experience.
-
-## Prequisites
-
-
-- Familiarity with JavaScript (Node.js), TypeScript and C++ (lzma-native)
-- Familiarity or willing to learn `CMake`, `Ninja`, `Visual Studio with C++ Desktop Dev`, UTM
-- Basic understandig of terminal programs such as `Windows Terminal`, `PowerShell` and `WSL2`
-- Access to a physical Windows on Arm device or a [WoA Virtual Machine running through UTM](https://mac.getutm.app/gallery/windows-11-arm). see the [Linaro Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) for more information.
-
-
-## Resources from Arm and our partners
-
-- Repository: [Arduino IDE GitHub repo](https://github.com/arduino/arduino-ide)
-- Repository: [lzma-native GitHub repo](https://github.com/addaleax/lzma-native)
-- External Documentation: [Issue #132 – lzma-native Windows Arm64 build failure](https://github.com/addaleax/lzma-native/issues/132)
-- Documentation: Arm’s official [Learn on Arm](https://learn.arm.com/) platform
-- External Documentation: [Windows on Arm Environments – Linaro wiki](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
-- Documentation: [Node.js native addon guides](https://nodejs.org/api/addons.html)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Arduino-IDE-Windows-on-Arm
+description: This self-service project ports and optimizes the Arduino IDE—patching its lzma-native dependency—to run natively and efficiently on Windows on Arm, giving developers hands-on experience with cross-platform builds, Arm64 performance tuning, and upstream open-source contributions.
+subjects:
+- Performance and Architecture
+- Migration to Arm
+- Libraries
+requires-team:
+- No
+platform:
+- Laptops and Desktops
+sw-hw:
+- Software
+- Hardware
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Published
+donation:
+badges:
+- trending
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+
+ ## Description
+
+ **Why is this important?**
+
+ Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WoA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, consumers will expect both performance and battery efficiency across all WoA applications, such as the Arduino IDE.
+
+ **Project summary**
+
+ This project focuses on **porting and optimising the Arduino IDE**—an essential open-source platform for embedded development—to run natively and efficiently on Windows on Arm platforms. In addition, the project tackles a key dependency, `lzma-native`, a compression library used by the IDE, which currently [**lacks support for Windows on Arm**](https://github.com/addaleax/lzma-native/issues/132). Previous attempts to build `lzma-native` on WoA failed due to architecture-specific compilation issues and native module bindings (`node-gyp`, `liblzma`, etc.).
+
+ ### Key Objectives:
+ - Successfully build and run the [Arduino IDE](https://github.com/arduino/arduino-ide) on Windows on Arm.
+ - Patch or fork [`lzma-native`](https://github.com/addaleax/lzma-native) to enable full compatibility on WoA.
+ - Benchmark IDE performance and memory usage on Arm64 vs. x64 emulation.
+ - Submit upstream patches and document issues to support long-term ecosystem health.
+
+ This project aligns strongly with Arm’s mission to expand native software compatibility on Arm-based Windows devices. It provides students with a **deep dive into cross-platform development, native module compilation, and Arm architecture optimization**, making it ideal for CV building, community contribution, and real-world system-level experience.
+
+ ## Prerequisites
+
+
+ - Familiarity with JavaScript (Node.js), TypeScript and C++ (lzma-native)
+ - Familiarity with or willingness to learn `CMake`, `Ninja`, `Visual Studio with C++ Desktop Dev`, UTM
+ - Basic understanding of terminal programs such as `Windows Terminal`, `PowerShell` and `WSL2`
+ - Access to a physical Windows on Arm device or a [WoA Virtual Machine running through UTM](https://mac.getutm.app/gallery/windows-11-arm). See the [Linaro Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) for more information.
+
+
+ ## Resources from Arm and our partners
+
+ - Repository: [Arduino IDE GitHub repo](https://github.com/arduino/arduino-ide)
+ - Repository: [lzma-native GitHub repo](https://github.com/addaleax/lzma-native)
+ - External Documentation: [Issue #132 – lzma-native Windows Arm64 build failure](https://github.com/addaleax/lzma-native/issues/132)
+ - Documentation: Arm’s official [Learn on Arm](https://learn.arm.com/) platform
+ - External Documentation: [Windows on Arm Environments – Linaro wiki](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
+ - Documentation: [Node.js native addon guides](https://nodejs.org/api/addons.html)
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+
+## Description
+
+**Why is this important?**
+
+Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WoA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, consumers will expect both performance and battery efficiency across all WoA applications, such as the Arduino IDE.
+
+**Project summary**
+
+This project focuses on **porting and optimising the Arduino IDE**—an essential open-source platform for embedded development—to run natively and efficiently on Windows on Arm platforms. In addition, the project tackles a key dependency, `lzma-native`, a compression library used by the IDE, which currently [**lacks support for Windows on Arm**](https://github.com/addaleax/lzma-native/issues/132). Previous attempts to build `lzma-native` on WoA failed due to architecture-specific compilation issues and native module bindings (`node-gyp`, `liblzma`, etc.).
+
+### Key Objectives:
+- Successfully build and run the [Arduino IDE](https://github.com/arduino/arduino-ide) on Windows on Arm.
+- Patch or fork [`lzma-native`](https://github.com/addaleax/lzma-native) to enable full compatibility on WoA.
+- Benchmark IDE performance and memory usage on Arm64 vs. x64 emulation.
+- Submit upstream patches and document issues to support long-term ecosystem health.
+
+This project aligns strongly with Arm’s mission to expand native software compatibility on Arm-based Windows devices. It provides students with a **deep dive into cross-platform development, native module compilation, and Arm architecture optimization**, making it ideal for CV building, community contribution, and real-world system-level experience.
+
+## Prerequisites
+
+
+- Familiarity with JavaScript (Node.js), TypeScript and C++ (lzma-native)
+- Familiarity with or willingness to learn `CMake`, `Ninja`, `Visual Studio with C++ Desktop Dev`, UTM
+- Basic understanding of terminal programs such as `Windows Terminal`, `PowerShell` and `WSL2`
+- Access to a physical Windows on Arm device or a [WoA Virtual Machine running through UTM](https://mac.getutm.app/gallery/windows-11-arm). See the [Linaro Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments) for more information.
+
+
+## Resources from Arm and our partners
+
+- Repository: [Arduino IDE GitHub repo](https://github.com/arduino/arduino-ide)
+- Repository: [lzma-native GitHub repo](https://github.com/addaleax/lzma-native)
+- External Documentation: [Issue #132 – lzma-native Windows Arm64 build failure](https://github.com/addaleax/lzma-native/issues/132)
+- Documentation: Arm’s official [Learn on Arm](https://learn.arm.com/) platform
+- External Documentation: [Windows on Arm Environments – Linaro wiki](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
+- Documentation: [Node.js native addon guides](https://nodejs.org/api/addons.html)
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md b/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md
index 07a62bfa..147a764d 100644
--- a/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md
+++ b/docs/_posts/2025-05-30-Bioinformatic-Pipeline-Analysis.md
@@ -1,124 +1,127 @@
----
-title: Benchmarking Bioconda Packages for Arm64 in Bioinformatics Pipelines
-description: This self-service project benchmarks Arm64 Bioconda packages in real nf-core workflows—measuring performance, diagnosing build failures, and proposing fixes that accelerate truly native bioinformatics on the expanding fleet of Arm-powered machines.
-subjects:
-- Performance and Architecture
-- Databases
-requires-team:
-- No
-platform:
-- Servers and Cloud Computing
-- Laptops and Desktops
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
- ### Description
-
- **Why this is important?**
-
- Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops running Linux and MacOS. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). While many leading Bioconda packages now support Linux/Arm, there still emulated components that can be the bottleneck.
-
- **Project summary**
-
- This project aims to benchmark specific Bioconda packages that have been built for Arm64 using the nf-core-arm-discovery repository. The participant will utilize public genomic datasets from databases such as NCBI, select appropriate datasets, and execute bioinformatics workflows on Arm-based infrastructure. The candidate will evaluate the performance, compatibility, and efficiency of these packages, document errors and failures, and investigate the reasons behind package build failures. The final deliverable will be a detailed report with performance metrics, identified issues, and recommended improvements to enhance package support on Arm64.
-
- The deliverables of the project are as follows:
-
- - Selection and justification of public genomic datasets.
- - Execution of bioinformatics workflows using Bioconda packages on Arm64.
- - Performance benchmarking and comparison with x86 architectures.
- - Documentation of failed package builds and proposed fixes.
- - Comprehensive report with results, analysis, and recommendations.
-
-
- ## Prequisites
-
- - Intermediate understanding of Python, Bash and nextflow
- - Basic experience with nf-core pipelines, Conda, Docker/Singularity, Snakemake
- - Access to Arm64-based cloud instances (e.g., AWS Graviton) with plenty of memory and storage
- - IP access to Public genomic databases (NCBI, ENA, etc.)
-
- ## Resources from Arm and our partners
-
- - External Documentation: [nf-core documentation](https://nf-co.re/docs/)
-
- - External Documentation: [AWS Graviton documentation](https://aws.amazon.com/ec2/graviton/)
-
- - Repository: [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main)
-
- - Repository: [Bioconda package repository](https://bioconda.github.io/)
-
- - Dataset: [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/)
-
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-### Description
-
-**Why this is important?**
-
-Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops running Linux and MacOS. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). While many leading Bioconda packages now support Linux/Arm, there still emulated components that can be the bottleneck.
-
-**Project summary**
-
-This project aims to benchmark specific Bioconda packages that have been built for Arm64 using the nf-core-arm-discovery repository. The participant will utilize public genomic datasets from databases such as NCBI, select appropriate datasets, and execute bioinformatics workflows on Arm-based infrastructure. The candidate will evaluate the performance, compatibility, and efficiency of these packages, document errors and failures, and investigate the reasons behind package build failures. The final deliverable will be a detailed report with performance metrics, identified issues, and recommended improvements to enhance package support on Arm64.
-
-The deliverables of the project are as follows:
-
-- Selection and justification of public genomic datasets.
-- Execution of bioinformatics workflows using Bioconda packages on Arm64.
-- Performance benchmarking and comparison with x86 architectures.
-- Documentation of failed package builds and proposed fixes.
-- Comprehensive report with results, analysis, and recommendations.
-
-
-## Prequisites
-
-- Intermediate understanding of Python, Bash and nextflow
-- Basic experience with nf-core pipelines, Conda, Docker/Singularity, Snakemake
-- Access to Arm64-based cloud instances (e.g., AWS Graviton) with plenty of memory and storage
-- IP access to Public genomic databases (NCBI, ENA, etc.)
-
-## Resources from Arm and our partners
-
-- External Documentation: [nf-core documentation](https://nf-co.re/docs/)
-
-- External Documentation: [AWS Graviton documentation](https://aws.amazon.com/ec2/graviton/)
-
-- Repository: [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main)
-
-- Repository: [Bioconda package repository](https://bioconda.github.io/)
-
-- Dataset: [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/)
-
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Bioinformatic-Pipeline-Analysis
+description: This self-service project benchmarks Arm64 Bioconda packages in real nf-core workflows—measuring performance, diagnosing build failures, and proposing fixes that accelerate truly native bioinformatics on the expanding fleet of Arm-powered machines.
+subjects:
+- Performance and Architecture
+- Databases
+requires-team:
+- No
+platform:
+- Servers and Cloud Computing
+- Laptops and Desktops
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+ ### Description
+
+ **Why is this important?**
+
+ Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops running Linux and macOS. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). While many leading Bioconda packages now support Linux/Arm, there are still emulated components that can be the bottleneck.
+
+ **Project summary**
+
+ This project aims to benchmark specific Bioconda packages that have been built for Arm64 using the nf-core-arm-discovery repository. The participant will utilize public genomic datasets from databases such as NCBI, select appropriate datasets, and execute bioinformatics workflows on Arm-based infrastructure. The candidate will evaluate the performance, compatibility, and efficiency of these packages, document errors and failures, and investigate the reasons behind package build failures. The final deliverable will be a detailed report with performance metrics, identified issues, and recommended improvements to enhance package support on Arm64.
+
+ The deliverables of the project are as follows:
+
+ - Selection and justification of public genomic datasets.
+ - Execution of bioinformatics workflows using Bioconda packages on Arm64.
+ - Performance benchmarking and comparison with x86 architectures.
+ - Documentation of failed package builds and proposed fixes.
+ - Comprehensive report with results, analysis, and recommendations.
+
+
+ ## Prerequisites
+
+ - Intermediate understanding of Python, Bash and nextflow
+ - Basic experience with nf-core pipelines, Conda, Docker/Singularity, Snakemake
+ - Access to Arm64-based cloud instances (e.g., AWS Graviton) with plenty of memory and storage
+ - IP access to Public genomic databases (NCBI, ENA, etc.)
+
+ ## Resources from Arm and our partners
+
+ - External Documentation: [nf-core documentation](https://nf-co.re/docs/)
+
+ - External Documentation: [AWS Graviton documentation](https://aws.amazon.com/ec2/graviton/)
+
+ - Repository: [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main)
+
+ - Repository: [Bioconda package repository](https://bioconda.github.io/)
+
+ - Dataset: [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/)
+
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+### Description
+
+**Why is this important?**
+
+Bioconda is a specialized package repository for bioinformatics and genomics. Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops running Linux and macOS. In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. These machines facilitate the execution of computationally intensive bioinformatics and statistics tasks locally. Potential downstream applications include faster, more affordable diagnoses that can be conducted closer to hospital patients, exemplified by the pilot [ROBIN software](https://www.nottingham.ac.uk/news/genetic-brain-tumour-diagnosis). While many leading Bioconda packages now support Linux/Arm, there are still emulated components that can be the bottleneck.
+
+**Project summary**
+
+This project aims to benchmark specific Bioconda packages that have been built for Arm64 using the nf-core-arm-discovery repository. The participant will utilize public genomic datasets from databases such as NCBI, select appropriate datasets, and execute bioinformatics workflows on Arm-based infrastructure. The candidate will evaluate the performance, compatibility, and efficiency of these packages, document errors and failures, and investigate the reasons behind package build failures. The final deliverable will be a detailed report with performance metrics, identified issues, and recommended improvements to enhance package support on Arm64.
+
+The deliverables of the project are as follows:
+
+- Selection and justification of public genomic datasets.
+- Execution of bioinformatics workflows using Bioconda packages on Arm64.
+- Performance benchmarking and comparison with x86 architectures.
+- Documentation of failed package builds and proposed fixes.
+- Comprehensive report with results, analysis, and recommendations.
+
+
+## Prerequisites
+
+- Intermediate understanding of Python, Bash and nextflow
+- Basic experience with nf-core pipelines, Conda, Docker/Singularity, Snakemake
+- Access to Arm64-based cloud instances (e.g., AWS Graviton) with plenty of memory and storage
+- IP access to Public genomic databases (NCBI, ENA, etc.)
+
+## Resources from Arm and our partners
+
+- External Documentation: [nf-core documentation](https://nf-co.re/docs/)
+
+- External Documentation: [AWS Graviton documentation](https://aws.amazon.com/ec2/graviton/)
+
+- Repository: [Arm64 nf-core pipelines](https://github.com/ewels/nf-core-arm-discovery/tree/main)
+
+- Repository: [Bioconda package repository](https://bioconda.github.io/)
+
+- Dataset: [NCBI Datasets](https://www.ncbi.nlm.nih.gov/datasets/)
+
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md b/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md
index e2efdae0..f4f601cd 100644
--- a/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md
+++ b/docs/_posts/2025-05-30-Compliance-Ready-Smart-Camera-System.md
@@ -1,97 +1,98 @@
----
-title: End-to-End Computer Vision System for Functional Safety
-description: This challenge will create and validate an Arm-based, smart camera pipeline on virtual automotive hardware—advancing safer, more developer-friendly driver-monitoring solutions for next-generation vehicles.
-subjects:
-- Security
-- Embedded Linux
-- ML
-- Virtual Hardware
-requires-team:
-- Yes
-platform:
-- Mobile, Graphics, and Gaming
-- Automotive
-- IoT
-- Embedded and Microcontrollers
-- AI
-sw-hw:
-- Software
-- Hardware
-support-level:
-- Self-Service
-- Arm Ambassador Support
-- Direct Support from Arm
-publication-date: 2025-05-30
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
- Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge.
-
-
- ## Description
-
- **Why this is important?**
-
- As of June 2025, [94% of global automakers](https://newsroom.arm.com/blog/arm-zena-css-ai-defined-vehicle-compute-platform) use Arm vehicle technology. The development of compliance-ready smart camera systems is crucial for Arm as it aligns with the increasing demand for safety and reliability in automotive platforms, see the [Arm Zena compute subsystem(CSS)](https://www.arm.com/products/automotive/compute-subsystems/zena) for more details.
-
- **Project summary**
-
- Government and external regulations, particularly in automotive industries, are shaping how smart camera products are designed, implemented, and verified. This project will explore how compliance with standards like ISO 26262 influences the design of smart camera systems, such as Driver Monitoring Systems (DMS) to ensure driver attentiveness. Students or engineers will collaborate with industry-relevant platforms, such as Arm's Kronos Fixed Virtual Platform (FVP) for automotive development, to understand and propose frictionless developer experiences aligned with functional safety standards.
-
- Deliverables include:
- - Developing a full end-to-end smart camera system that can be incorporated into a functional safety environment (such as automotive or medical), following the relevant standards for development (e.g., ISO 26262)
- - A survey of regulatory requirements and their impact on smart camera design
- - An architectural analysis integrating Arm-based systems into a compliant automotive software stack
- - Recommendations for enhancing developer tools and reference software stacks to align with ISO standards
-
- ## Estimated Project Duration
- - Estimated Time: 6+ months
- - Participants: Team of 2+
-
- ## Resources from Arm and Arm partners
- - Learning Paths - [Automotive Development](https://www.arm.com/resources/learning-paths/automotive)
- - Software - Arm Automotive Reference Platforms (e.g., [Kronos FVP](https://arm-auto-solutions.docs.arm.com/en/v1.0/overview.html))
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge.
-
-
-## Description
-
-**Why this is important?**
-
-As of June 2025, [94% of global automakers](https://newsroom.arm.com/blog/arm-zena-css-ai-defined-vehicle-compute-platform) use Arm vehicle technology. The development of compliance-ready smart camera systems is crucial for Arm as it aligns with the increasing demand for safety and reliability in automotive platforms, see the [Arm Zena compute subsystem(CSS)](https://www.arm.com/products/automotive/compute-subsystems/zena) for more details.
-
-**Project summary**
-
-Government and external regulations, particularly in automotive industries, are shaping how smart camera products are designed, implemented, and verified. This project will explore how compliance with standards like ISO 26262 influences the design of smart camera systems, such as Driver Monitoring Systems (DMS) to ensure driver attentiveness. Students or engineers will collaborate with industry-relevant platforms, such as Arm's Kronos Fixed Virtual Platform (FVP) for automotive development, to understand and propose frictionless developer experiences aligned with functional safety standards.
-
-Deliverables include:
-- Developing a full end-to-end smart camera system that can be incorporated into a functional safety environment (such as automotive or medical), following the relevant standards for development (e.g., ISO 26262)
-- A survey of regulatory requirements and their impact on smart camera design
-- An architectural analysis integrating Arm-based systems into a compliant automotive software stack
-- Recommendations for enhancing developer tools and reference software stacks to align with ISO standards
-
-## Estimated Project Duration
-- Estimated Time: 6+ months
-- Participants: Team of 2+
-
-## Resources from Arm and Arm partners
-- Learning Paths - [Automotive Development](https://www.arm.com/resources/learning-paths/automotive)
-- Software - Arm Automotive Reference Platforms (e.g., [Kronos FVP](https://arm-auto-solutions.docs.arm.com/en/v1.0/overview.html))
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Compliance-Ready-Smart-Camera-System
+description: This challenge will create and validate an Arm-based, smart camera pipeline on virtual automotive hardware—advancing safer, more developer-friendly driver-monitoring solutions for next-generation vehicles.
+subjects:
+- Security
+- Embedded Linux
+- ML
+- Virtual Hardware
+requires-team:
+- Yes
+platform:
+- Mobile, Graphics, and Gaming
+- Automotive
+- IoT
+- Embedded and Microcontrollers
+- AI
+sw-hw:
+- Software
+- Hardware
+support-level:
+- Self-Service
+- Arm Ambassador Support
+- Direct Support from Arm
+publication-date: 2025-05-30
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+ Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge.
+
+
+ ## Description
+
+ **Why is this important?**
+
+ As of June 2025, [94% of global automakers](https://newsroom.arm.com/blog/arm-zena-css-ai-defined-vehicle-compute-platform) use Arm vehicle technology. The development of compliance-ready smart camera systems is crucial for Arm as it aligns with the increasing demand for safety and reliability in automotive platforms; see the [Arm Zena compute subsystem (CSS)](https://www.arm.com/products/automotive/compute-subsystems/zena) for more details.
+
+ **Project summary**
+
+ Government and external regulations, particularly in automotive industries, are shaping how smart camera products are designed, implemented, and verified. This project will explore how compliance with standards like ISO 26262 influences the design of smart camera systems, such as Driver Monitoring Systems (DMS) to ensure driver attentiveness. Students or engineers will collaborate with industry-relevant platforms, such as Arm's Kronos Fixed Virtual Platform (FVP) for automotive development, to understand and propose frictionless developer experiences aligned with functional safety standards.
+
+ Deliverables include:
+ - Developing a full end-to-end smart camera system that can be incorporated into a functional safety environment (such as automotive or medical), following the relevant standards for development (e.g., ISO 26262)
+ - A survey of regulatory requirements and their impact on smart camera design
+ - An architectural analysis integrating Arm-based systems into a compliant automotive software stack
+ - Recommendations for enhancing developer tools and reference software stacks to align with ISO standards
+
+ ## Estimated Project Duration
+ - Estimated Time: 6+ months
+ - Participants: Team of 2+
+
+ ## Resources from Arm and Arm partners
+ - Learning Paths - [Automotive Development](https://www.arm.com/resources/learning-paths/automotive)
+ - Software - Arm Automotive Reference Platforms (e.g., [Kronos FVP](https://arm-auto-solutions.docs.arm.com/en/v1.0/overview.html))
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge.
+
+
+## Description
+
+**Why is this important?**
+
+As of June 2025, [94% of global automakers](https://newsroom.arm.com/blog/arm-zena-css-ai-defined-vehicle-compute-platform) use Arm vehicle technology. The development of compliance-ready smart camera systems is crucial for Arm as it aligns with the increasing demand for safety and reliability in automotive platforms; see the [Arm Zena compute subsystem (CSS)](https://www.arm.com/products/automotive/compute-subsystems/zena) for more details.
+
+**Project summary**
+
+Government and external regulations, particularly in automotive industries, are shaping how smart camera products are designed, implemented, and verified. This project will explore how compliance with standards like ISO 26262 influences the design of smart camera systems, such as Driver Monitoring Systems (DMS) to ensure driver attentiveness. Students or engineers will collaborate with industry-relevant platforms, such as Arm's Kronos Fixed Virtual Platform (FVP) for automotive development, to understand and propose frictionless developer experiences aligned with functional safety standards.
+
+Deliverables include:
+- Developing a full end-to-end smart camera system that can be incorporated into a functional safety environment (such as automotive or medical), following the relevant standards for development (e.g., ISO 26262)
+- A survey of regulatory requirements and their impact on smart camera design
+- An architectural analysis integrating Arm-based systems into a compliant automotive software stack
+- Recommendations for enhancing developer tools and reference software stacks to align with ISO standards
+
+## Estimated Project Duration
+- Estimated Time: 6+ months
+- Participants: Team of 2+
+
+## Resources from Arm and Arm partners
+- Learning Paths - [Automotive Development](https://www.arm.com/resources/learning-paths/automotive)
+- Software - Arm Automotive Reference Platforms (e.g., [Kronos FVP](https://arm-auto-solutions.docs.arm.com/en/v1.0/overview.html))
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md b/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md
index d1d3c355..55283664 100644
--- a/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md
+++ b/docs/_posts/2025-05-30-FPGA-Accellerator-with-DDR.md
@@ -1,74 +1,77 @@
----
-title: Linux Capable SoC FPGA Prototyping Platform with DDR Memory
-description: This self-service project takes Arm Corstone-1000 from FPGA to silicon, delivering a DDR-backed, Linux-ready SoC platform that lets researchers plug in and evaluate custom accelerators with real-world performance.
-subjects:
-- Virtual Hardware
-- Performance and Architecture
-requires-team:
-- No
-platform:
-- IoT
-- Embedded and Microcontrollers
-sw-hw:
-- Hardware
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Hidden
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
- ## Description
- This project aims to leverage the Corstone-1000 platform to host and support a custom research accelerator. The main deliverables include prototyping the accelerator in FPGA and then creating a physical silicon implementation using a pre-verified programmable control system. The project will provide practical experience in SoC design, FPGA prototyping, and hardware acceleration. The final output will be a functional SoC FPGA prototyping platform with DDR memory, capable of running Linux and demonstrating the feasibility and performance of the design. **To undertake this project, please reach out to SoC Labs** in [this link](https://soclabs.org/).
-
-
- ## Prequisites
-
- - Languages: Verilog, SystemVerilog
- - Tooling: Vivado, ModelSim, ASIC design tools
- - Hardware: FPGA development board (e.g., Xilinx or Altera), Corstone-1000 platform
- - IP access: Arm Academic Access member (link to get if they don't have it)
-
- ## Resources from Arm and our partners
-
- - External Community: [SoC Labs, community for Arm-based software development](https://soclabs.org/)
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-## Description
-This project aims to leverage the Corstone-1000 platform to host and support a custom research accelerator. The main deliverables include prototyping the accelerator in FPGA and then creating a physical silicon implementation using a pre-verified programmable control system. The project will provide practical experience in SoC design, FPGA prototyping, and hardware acceleration. The final output will be a functional SoC FPGA prototyping platform with DDR memory, capable of running Linux and demonstrating the feasibility and performance of the design. **To undertake this project, please reach out to SoC Labs** in [this link](https://soclabs.org/).
-
-
-## Prequisites
-
-- Languages: Verilog, SystemVerilog
-- Tooling: Vivado, ModelSim, ASIC design tools
-- Hardware: FPGA development board (e.g., Xilinx or Altera), Corstone-1000 platform
-- IP access: Arm Academic Access member (link to get if they don't have it)
-
-## Resources from Arm and our partners
-
-- External Community: [SoC Labs, community for Arm-based software development](https://soclabs.org/)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: FPGA-Accellerator-with-DDR
+description: This self-service project takes Arm Corstone-1000 from FPGA to silicon, delivering a DDR-backed, Linux-ready SoC platform that lets researchers plug in and evaluate custom accelerators with real-world performance.
+subjects:
+- Virtual Hardware
+- Performance and Architecture
+requires-team:
+- No
+platform:
+- IoT
+- Embedded and Microcontrollers
+sw-hw:
+- Hardware
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Hidden
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+ ## Description
+ This project aims to leverage the Corstone-1000 platform to host and support a custom research accelerator. The main deliverables include prototyping the accelerator in FPGA and then creating a physical silicon implementation using a pre-verified programmable control system. The project will provide practical experience in SoC design, FPGA prototyping, and hardware acceleration. The final output will be a functional SoC FPGA prototyping platform with DDR memory, capable of running Linux and demonstrating the feasibility and performance of the design. **To undertake this project, please reach out to SoC Labs** in [this link](https://soclabs.org/).
+
+
+ ## Prerequisites
+
+ - Languages: Verilog, SystemVerilog
+ - Tooling: Vivado, ModelSim, ASIC design tools
+ - Hardware: FPGA development board (e.g., Xilinx or Altera), Corstone-1000 platform
+ - IP access: Arm Academic Access member (link to get if they don't have it)
+
+ ## Resources from Arm and our partners
+
+ - External Community: [SoC Labs, community for Arm-based software development](https://soclabs.org/)
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+## Description
+This project aims to leverage the Corstone-1000 platform to host and support a custom research accelerator. The main deliverables include prototyping the accelerator in FPGA and then creating a physical silicon implementation using a pre-verified programmable control system. The project will provide practical experience in SoC design, FPGA prototyping, and hardware acceleration. The final output will be a functional SoC FPGA prototyping platform with DDR memory, capable of running Linux and demonstrating the feasibility and performance of the design. **To undertake this project, please reach out to SoC Labs** in [this link](https://soclabs.org/).
+
+
+## Prerequisites
+
+- Languages: Verilog, SystemVerilog
+- Tooling: Vivado, ModelSim, ASIC design tools
+- Hardware: FPGA development board (e.g., Xilinx or Altera), Corstone-1000 platform
+- IP access: Arm Academic Access member (link to get if they don't have it)
+
+## Resources from Arm and our partners
+
+- External Community: [SoC Labs, community for Arm-based software development](https://soclabs.org/)
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-HPC-Algorithm.md b/docs/_posts/2025-05-30-HPC-Algorithm.md
index 2bb1f390..6de499fe 100644
--- a/docs/_posts/2025-05-30-HPC-Algorithm.md
+++ b/docs/_posts/2025-05-30-HPC-Algorithm.md
@@ -1,92 +1,96 @@
----
-title: Optimise Performance of an Algorithm Used in High-Performance Compute Using Scalable Vector Extensions (SVE / SVE2)
-description: This self-service project is around finding a HPC algorithm and accelerating it with Arm’s SVE/SVE2 vectorization—demonstrating how next-generation Arm hardware can deliver significant, scalable performance gains.
-subjects:
-- Performance and Architecture
-requires-team:
-- No
-platform:
-- Servers and Cloud Computing
-- Laptops and Desktops
-- AI
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
- ## Description
-
- **Why this is important?**
-
- Scalable Vector Extension (SVE) is a vector extension the A64 instruction set of the Armv8-A architecture. Armv9-A builds on SVE with the SVE2 extension. Unlike other single-instruction multiple data (SIMD) architectures, SVE and SVE2 do not define the size of the vector registers, but constrains it to a range of possible values, from a minimum of 128 bits up to a maximum of 2048 in 128-bit wide units. Therefore, any CPU vendor can implement the extension by choosing the vector register size that better suits the workloads the CPU is targeting. As of J there is growing availablity of SVE-enabled hardware, such as through cloud service providers. However, not all software has taken advantage of this feature. As such there is potential performance improvements available to software libraries and applications that add support for SVE/SVE2.
-
- **Project summary**
-
- This project aims to identify and optimize the performance of an algorithm used in high-performance computing (HPC) by leveraging Scalable Vector Extensions (SVE) instructions. The main deliverable is an optimized version of the chosen algorithm that demonstrates a performance improvements using SVE. This project will provide practical experience in HPC, vectorization, and performance optimization. The final output will be a detailed report and a functional implementation of the optimized algorithm.
-
- ## Prequisites
-
- - Intermediate undestanding of C, C++ or Fortran.
- - Experience with high performance compute (HPC).
- - Basic understanding of compilers such as Arm Compiler for HPC, or autovectorising compiler such as GCC.
- - Access to Arm-based servers or SVE-enabled hardware
-
- ## Resources from Arm and our partners
-
- - Learning path: [Port Code to SVE](https://learn.arm.com/learning-paths/servers-and-cloud-computing/sve/)
- - Learning path: [Migrate applications that use performance libraries](https://learn.arm.com/learning-paths/servers-and-cloud-computing/using-and-porting-performance-libs/)
- - Documentation: [SVE Programmers Guide](https://developer.arm.com/documentation/102476/0101/Programming-with-SVE)
-
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-## Description
-
-**Why this is important?**
-
-Scalable Vector Extension (SVE) is a vector extension the A64 instruction set of the Armv8-A architecture. Armv9-A builds on SVE with the SVE2 extension. Unlike other single-instruction multiple data (SIMD) architectures, SVE and SVE2 do not define the size of the vector registers, but constrains it to a range of possible values, from a minimum of 128 bits up to a maximum of 2048 in 128-bit wide units. Therefore, any CPU vendor can implement the extension by choosing the vector register size that better suits the workloads the CPU is targeting. As of J there is growing availablity of SVE-enabled hardware, such as through cloud service providers. However, not all software has taken advantage of this feature. As such there is potential performance improvements available to software libraries and applications that add support for SVE/SVE2.
-
-**Project summary**
-
-This project aims to identify and optimize the performance of an algorithm used in high-performance computing (HPC) by leveraging Scalable Vector Extensions (SVE) instructions. The main deliverable is an optimized version of the chosen algorithm that demonstrates a performance improvements using SVE. This project will provide practical experience in HPC, vectorization, and performance optimization. The final output will be a detailed report and a functional implementation of the optimized algorithm.
-
-## Prequisites
-
-- Intermediate undestanding of C, C++ or Fortran.
-- Experience with high performance compute (HPC).
-- Basic understanding of compilers such as Arm Compiler for HPC, or autovectorising compiler such as GCC.
-- Access to Arm-based servers or SVE-enabled hardware
-
-## Resources from Arm and our partners
-
-- Learning path: [Port Code to SVE](https://learn.arm.com/learning-paths/servers-and-cloud-computing/sve/)
-- Learning path: [Migrate applications that use performance libraries](https://learn.arm.com/learning-paths/servers-and-cloud-computing/using-and-porting-performance-libs/)
-- Documentation: [SVE Programmers Guide](https://developer.arm.com/documentation/102476/0101/Programming-with-SVE)
-
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: HPC-Algorithm
+description: This self-service project is about finding an HPC algorithm and accelerating it with Arm’s SVE/SVE2 vectorization—demonstrating how next-generation Arm hardware can deliver significant, scalable performance gains.
+subjects:
+- Performance and Architecture
+requires-team:
+- No
+platform:
+- Servers and Cloud Computing
+- Laptops and Desktops
+- AI
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Published
+donation:
+badges: trending
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+ ## Description
+
+ **Why is this important?**
+
+ Scalable Vector Extension (SVE) is a vector extension to the A64 instruction set of the Armv8-A architecture. Armv9-A builds on SVE with the SVE2 extension. Unlike other single-instruction multiple data (SIMD) architectures, SVE and SVE2 do not define the size of the vector registers, but constrain it to a range of possible values, from a minimum of 128 bits up to a maximum of 2048 bits, in 128-bit wide units. Therefore, any CPU vendor can implement the extension by choosing the vector register size that better suits the workloads the CPU is targeting. As of 2025, there is growing availability of SVE-enabled hardware, such as through cloud service providers. However, not all software has taken advantage of this feature. As such, there are potential performance improvements available to software libraries and applications that add support for SVE/SVE2.
+
+ **Project summary**
+
+ This project aims to identify and optimize the performance of an algorithm used in high-performance computing (HPC) by leveraging Scalable Vector Extensions (SVE) instructions. The main deliverable is an optimized version of the chosen algorithm that demonstrates performance improvements using SVE. This project will provide practical experience in HPC, vectorization, and performance optimization. The final output will be a detailed report and a functional implementation of the optimized algorithm.
+
+ ## Prerequisites
+
+ - Intermediate understanding of C, C++ or Fortran.
+ - Experience with high performance compute (HPC).
+ - Basic understanding of compilers such as Arm Compiler for HPC, or an autovectorising compiler such as GCC.
+ - Access to Arm-based servers or SVE-enabled hardware
+
+ ## Resources from Arm and our partners
+
+ - Learning path: [Port Code to SVE](https://learn.arm.com/learning-paths/servers-and-cloud-computing/sve/)
+ - Learning path: [Migrate applications that use performance libraries](https://learn.arm.com/learning-paths/servers-and-cloud-computing/using-and-porting-performance-libs/)
+ - Documentation: [SVE Programmers Guide](https://developer.arm.com/documentation/102476/0101/Programming-with-SVE)
+
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+## Description
+
+**Why is this important?**
+
+Scalable Vector Extension (SVE) is a vector extension to the A64 instruction set of the Armv8-A architecture. Armv9-A builds on SVE with the SVE2 extension. Unlike other single-instruction multiple data (SIMD) architectures, SVE and SVE2 do not define the size of the vector registers, but constrain it to a range of possible values, from a minimum of 128 bits up to a maximum of 2048 bits, in 128-bit wide units. Therefore, any CPU vendor can implement the extension by choosing the vector register size that better suits the workloads the CPU is targeting. As of 2025, there is growing availability of SVE-enabled hardware, such as through cloud service providers. However, not all software has taken advantage of this feature. As such, there are potential performance improvements available to software libraries and applications that add support for SVE/SVE2.
+
+**Project summary**
+
+This project aims to identify and optimize the performance of an algorithm used in high-performance computing (HPC) by leveraging Scalable Vector Extensions (SVE) instructions. The main deliverable is an optimized version of the chosen algorithm that demonstrates performance improvements using SVE. This project will provide practical experience in HPC, vectorization, and performance optimization. The final output will be a detailed report and a functional implementation of the optimized algorithm.
+
+## Prerequisites
+
+- Intermediate understanding of C, C++ or Fortran.
+- Experience with high performance compute (HPC).
+- Basic understanding of compilers such as Arm Compiler for HPC, or an autovectorising compiler such as GCC.
+- Access to Arm-based servers or SVE-enabled hardware
+
+## Resources from Arm and our partners
+
+- Learning path: [Port Code to SVE](https://learn.arm.com/learning-paths/servers-and-cloud-computing/sve/)
+- Learning path: [Migrate applications that use performance libraries](https://learn.arm.com/learning-paths/servers-and-cloud-computing/using-and-porting-performance-libs/)
+- Documentation: [SVE Programmers Guide](https://developer.arm.com/documentation/102476/0101/Programming-with-SVE)
+
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md b/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md
index 40087d63..51cd36ee 100644
--- a/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md
+++ b/docs/_posts/2025-05-30-Haskell-Compiler-Windows-on-Arm.md
@@ -1,125 +1,126 @@
----
-title: Adding Windows on Arm Support to the Glasgow Haskell Compiler (GHC)
-description: This self-service project brings native Glasgow Haskell Compiler support to Windows on Arm—unlocking efficient Arm-laptop builds, extending Haskell’s reach, and giving contributors hands-on experience with Arm64 code generation and runtime integration.
-subjects:
-- Migration to Arm
-- Performance and Architecture
-requires-team:
-- No
-platform:
-- Servers and Cloud Computing
-- Laptops and Desktops
-sw-hw:
-- Software
-- Hardware
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
-
-
-
- ## Description
-
- **Why this is important?**
-
- The Glasgow Haskell Compiler (GHC) is the de facto standard compiler for Haskell, an advanced purely functional programming language with strong type inference and lazy evaluation. This project aims to **port GHC to support Windows on Arm (WoA)**, a platform that is increasingly relevant with the rise of Arm-powered laptops and developer kits. Arm anticipates more original equipment manufacturers (OEMs) to be available in the coming years.
-
-
- **Project summary**
-
- Currently, GHC lacks robust support for WoA, hindering Haskell’s reach in energy-efficient and mobile-native environments (Request for support has [previously been requested by the community](https://gitlab.haskell.org/ghc/ghc/-/issues/24603)). The goal is to bridge this gap by:
- - Enabling native compilation of Haskell code via GHC on WoA.
- - Implementing and testing architecture-specific assembly and intrinsic functions.
- - Extending the GHC build system to recognize WoA environments.
- - Integrating and validating linker and runtime support on Arm-based Windows systems.
-
- The project requires in-depth familiarity with compiler backends, calling conventions, code generation pipelines, and the use of LLVM or native code generators. Students will also gain experience in cross-compilation, Windows PE/COFF linking, and performance benchmarking on Arm CPUs.
-
- The work has potential for real-world deployment and academic publishing, and would be of high value to the Haskell and Arm developer ecosystems.
-
- ---
-
- ## Prequisites
-
- - Advanced understanding of Haskell (including Template Haskell, Core-to-STG pipeline understanding)
- - Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
- - Intemediate understanding of Arm64 Assembly (AArch64)
- - Comfortable using compilers such as LLVM and Clang for backend work (if using LLVM codegen)
- - Access to MSYS2 / CMake / Ninja for Windows builds
-
-
- ## Resources from Arm and our partners
-
- - External Documentation: [GHC Development Wiki](https://gitlab.haskell.org/ghc/ghc/-/wikis/)
- - Repository: [GHC source tree](https://gitlab.haskell.org/ghc/ghc)
- - External Documentation: [Linaro WoA Support Documentation](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
-
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-
-## Description
-
-**Why this is important?**
-
-The Glasgow Haskell Compiler (GHC) is the de facto standard compiler for Haskell, an advanced purely functional programming language with strong type inference and lazy evaluation. This project aims to **port GHC to support Windows on Arm (WoA)**, a platform that is increasingly relevant with the rise of Arm-powered laptops and developer kits. Arm anticipates more original equipment manufacturers (OEMs) to be available in the coming years.
-
-
-**Project summary**
-
-Currently, GHC lacks robust support for WoA, hindering Haskell’s reach in energy-efficient and mobile-native environments (Request for support has [previously been requested by the community](https://gitlab.haskell.org/ghc/ghc/-/issues/24603)). The goal is to bridge this gap by:
-- Enabling native compilation of Haskell code via GHC on WoA.
-- Implementing and testing architecture-specific assembly and intrinsic functions.
-- Extending the GHC build system to recognize WoA environments.
-- Integrating and validating linker and runtime support on Arm-based Windows systems.
-
-The project requires in-depth familiarity with compiler backends, calling conventions, code generation pipelines, and the use of LLVM or native code generators. Students will also gain experience in cross-compilation, Windows PE/COFF linking, and performance benchmarking on Arm CPUs.
-
-The work has potential for real-world deployment and academic publishing, and would be of high value to the Haskell and Arm developer ecosystems.
-
----
-
-## Prequisites
-
-- Advanced understanding of Haskell (including Template Haskell, Core-to-STG pipeline understanding)
-- Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
-- Intemediate understanding of Arm64 Assembly (AArch64)
-- Comfortable using compilers such as LLVM and Clang for backend work (if using LLVM codegen)
-- Access to MSYS2 / CMake / Ninja for Windows builds
-
-
-## Resources from Arm and our partners
-
-- External Documentation: [GHC Development Wiki](https://gitlab.haskell.org/ghc/ghc/-/wikis/)
-- Repository: [GHC source tree](https://gitlab.haskell.org/ghc/ghc)
-- External Documentation: [Linaro WoA Support Documentation](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
-
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Haskell-Compiler-Windows-on-Arm
+description: This self-service project brings native Glasgow Haskell Compiler support to Windows on Arm—unlocking efficient Arm-laptop builds, extending Haskell’s reach, and giving contributors hands-on experience with Arm64 code generation and runtime integration.
+subjects:
+- Migration to Arm
+- Performance and Architecture
+requires-team:
+- No
+platform:
+- Servers and Cloud Computing
+- Laptops and Desktops
+sw-hw:
+- Software
+- Hardware
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+
+ ## Description
+
+ **Why this is important?**
+
+ The Glasgow Haskell Compiler (GHC) is the de facto standard compiler for Haskell, an advanced purely functional programming language with strong type inference and lazy evaluation. This project aims to **port GHC to support Windows on Arm (WoA)**, a platform that is increasingly relevant with the rise of Arm-powered laptops and developer kits. Arm anticipates more original equipment manufacturers (OEMs) to be available in the coming years.
+
+
+ **Project summary**
+
+ Currently, GHC lacks robust support for WoA, hindering Haskell’s reach in energy-efficient and mobile-native environments (Request for support has [previously been requested by the community](https://gitlab.haskell.org/ghc/ghc/-/issues/24603)). The goal is to bridge this gap by:
+ - Enabling native compilation of Haskell code via GHC on WoA.
+ - Implementing and testing architecture-specific assembly and intrinsic functions.
+ - Extending the GHC build system to recognize WoA environments.
+ - Integrating and validating linker and runtime support on Arm-based Windows systems.
+
+ The project requires in-depth familiarity with compiler backends, calling conventions, code generation pipelines, and the use of LLVM or native code generators. Students will also gain experience in cross-compilation, Windows PE/COFF linking, and performance benchmarking on Arm CPUs.
+
+ The work has potential for real-world deployment and academic publishing, and would be of high value to the Haskell and Arm developer ecosystems.
+
+ ---
+
+ ## Prerequisites
+
+ - Advanced understanding of Haskell (including Template Haskell, Core-to-STG pipeline understanding)
+ - Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
+ - Intermediate understanding of Arm64 Assembly (AArch64)
+ - Comfortable using compilers such as LLVM and Clang for backend work (if using LLVM codegen)
+ - Access to MSYS2 / CMake / Ninja for Windows builds
+
+
+ ## Resources from Arm and our partners
+
+ - External Documentation: [GHC Development Wiki](https://gitlab.haskell.org/ghc/ghc/-/wikis/)
+ - Repository: [GHC source tree](https://gitlab.haskell.org/ghc/ghc)
+ - External Documentation: [Linaro WoA Support Documentation](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
+
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+
+## Description
+
+**Why this is important?**
+
+The Glasgow Haskell Compiler (GHC) is the de facto standard compiler for Haskell, an advanced purely functional programming language with strong type inference and lazy evaluation. This project aims to **port GHC to support Windows on Arm (WoA)**, a platform that is increasingly relevant with the rise of Arm-powered laptops and developer kits. Arm anticipates more original equipment manufacturers (OEMs) to be available in the coming years.
+
+
+**Project summary**
+
+Currently, GHC lacks robust support for WoA, hindering Haskell’s reach in energy-efficient and mobile-native environments (Request for support has [previously been requested by the community](https://gitlab.haskell.org/ghc/ghc/-/issues/24603)). The goal is to bridge this gap by:
+- Enabling native compilation of Haskell code via GHC on WoA.
+- Implementing and testing architecture-specific assembly and intrinsic functions.
+- Extending the GHC build system to recognize WoA environments.
+- Integrating and validating linker and runtime support on Arm-based Windows systems.
+
+The project requires in-depth familiarity with compiler backends, calling conventions, code generation pipelines, and the use of LLVM or native code generators. Students will also gain experience in cross-compilation, Windows PE/COFF linking, and performance benchmarking on Arm CPUs.
+
+The work has potential for real-world deployment and academic publishing, and would be of high value to the Haskell and Arm developer ecosystems.
+
+---
+
+## Prerequisites
+
+- Advanced understanding of Haskell (including Template Haskell, Core-to-STG pipeline understanding)
+- Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
+- Intermediate understanding of Arm64 Assembly (AArch64)
+- Comfortable using compilers such as LLVM and Clang for backend work (if using LLVM codegen)
+- Access to MSYS2 / CMake / Ninja for Windows builds
+
+
+## Resources from Arm and our partners
+
+- External Documentation: [GHC Development Wiki](https://gitlab.haskell.org/ghc/ghc/-/wikis/)
+- Repository: [GHC source tree](https://gitlab.haskell.org/ghc/ghc)
+- External Documentation: [Linaro WoA Support Documentation](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
+
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Human-Centric-Robotics.md b/docs/_posts/2025-05-30-Human-Centric-Robotics.md
index 9474b068..f936d4bf 100644
--- a/docs/_posts/2025-05-30-Human-Centric-Robotics.md
+++ b/docs/_posts/2025-05-30-Human-Centric-Robotics.md
@@ -1,113 +1,114 @@
----
-title: Human-Centric Robotics – Urban Deployment & Socioeconomic Modelling
-description: This team project will build and test an Arm-based urban service robot—merging real-time navigation, vision-guided manipulation, and human interaction—and model its socioeconomic impact to show how Arm platforms can transform last-mile delivery, eldercare, or other city services.
-subjects:
-- ML
-- Embedded Linux
-- RTOS Fundamentals
-requires-team:
-- Yes
-platform:
-- Automotive
-- IoT
-- Embedded and Microcontrollers
-- AI
-sw-hw:
-- Software
-- Hardware
-support-level:
-- Self-Service
-- Arm Ambassador Support
-- Direct Support from Arm
-publication-date: 2025-05-30
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
- Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge.
-
- ## Description
-
- **Why this is important?**
-
- Arm is transitioning from traditional IP to providing platforms for the AI era, please see a recent [news post](https://newsroom.arm.com/news/new-arm-product-naming-architecture) for more details. This project is crucial for Arm as it showcases the versatility of Arm-based platforms in real-world applications, enhancing their relevance in the robotics sector. Furthermore, the insights gained from this project can inform future developments and partnerships and share future platforms related to robotics.
-
- **Project Summary**
-
- This project challenges students to design, build, and evaluate a human-centric robotic system for urban deployment using Arm-based compute platforms such as Raspberry Pi 5, NVIDIA Jetson Orin Nano, etc. The primary focus is on deploying prototypes in a controlled campus environment for applications like last-mile delivery, eldercare assistance, or smart waste collection.
-
-
- Participants will integrate real-time navigation, object manipulation, and human-interaction modules using state-of-the-art computer vision and sensor fusion frameworks. The second phase involves simulating or evaluating the robot’s impact on urban workflows and labor markets, including surveys or socioeconomic modeling techniques (e.g., system dynamics or agent-based simulation).
-
- Potential Deliverables include:
- - A working prototype running on an Arm-based platform
- - Software stack (navigation, ML inference, interaction logic)
- - Field evaluation results & UX data (e.g., survey or usage logs)
- - Report of development process and considerations when prototyping an end-user product.
- - A socioeconomic impact report using modeling or simulation techniques
-
- *Note: Arm does not offer direct channels to municipalities or public testing environments. Projects should focus on campus deployments, simulated environments (e.g., Gazebo).*
-
- ## Estimated Project Duration
-
- 6+ months
- Team size: 2+ participants
-
- ## Prerequisites
-
- - **Languages**: Familiarity with an OOP language.
- - **Hardware**:
- - **IP/Cloud Access**:
- - Any cloud service provider with Arm-based instances (for model training or data analysis)
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge.
-
-## Description
-
-**Why this is important?**
-
-Arm is transitioning from traditional IP to providing platforms for the AI era, please see a recent [news post](https://newsroom.arm.com/news/new-arm-product-naming-architecture) for more details. This project is crucial for Arm as it showcases the versatility of Arm-based platforms in real-world applications, enhancing their relevance in the robotics sector. Furthermore, the insights gained from this project can inform future developments and partnerships and share future platforms related to robotics.
-
-**Project Summary**
-
-This project challenges students to design, build, and evaluate a human-centric robotic system for urban deployment using Arm-based compute platforms such as Raspberry Pi 5, NVIDIA Jetson Orin Nano, etc. The primary focus is on deploying prototypes in a controlled campus environment for applications like last-mile delivery, eldercare assistance, or smart waste collection.
-
-
-Participants will integrate real-time navigation, object manipulation, and human-interaction modules using state-of-the-art computer vision and sensor fusion frameworks. The second phase involves simulating or evaluating the robot’s impact on urban workflows and labor markets, including surveys or socioeconomic modeling techniques (e.g., system dynamics or agent-based simulation).
-
-Potential Deliverables include:
-- A working prototype running on an Arm-based platform
-- Software stack (navigation, ML inference, interaction logic)
-- Field evaluation results & UX data (e.g., survey or usage logs)
-- Report of development process and considerations when prototyping an end-user product.
-- A socioeconomic impact report using modeling or simulation techniques
-
-*Note: Arm does not offer direct channels to municipalities or public testing environments. Projects should focus on campus deployments, simulated environments (e.g., Gazebo).*
-
-## Estimated Project Duration
-
-6+ months
-Team size: 2+ participants
-
-## Prerequisites
-
-- **Languages**: Familiarity with an OOP language.
-- **Hardware**:
-- **IP/Cloud Access**:
- - Any cloud service provider with Arm-based instances (for model training or data analysis)
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Human-Centric-Robotics
+description: This team project will build and test an Arm-based urban service robot—merging real-time navigation, vision-guided manipulation, and human interaction—and model its socioeconomic impact to show how Arm platforms can transform last-mile delivery, eldercare, or other city services.
+subjects:
+- ML
+- Embedded Linux
+- RTOS Fundamentals
+requires-team:
+- Yes
+platform:
+- Automotive
+- IoT
+- Embedded and Microcontrollers
+- AI
+sw-hw:
+- Software
+- Hardware
+support-level:
+- Self-Service
+- Arm Ambassador Support
+- Direct Support from Arm
+publication-date: 2025-05-30
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+ Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge.
+
+ ## Description
+
+ **Why this is important?**
+
+ Arm is transitioning from traditional IP to providing platforms for the AI era, please see a recent [news post](https://newsroom.arm.com/news/new-arm-product-naming-architecture) for more details. This project is crucial for Arm as it showcases the versatility of Arm-based platforms in real-world applications, enhancing their relevance in the robotics sector. Furthermore, the insights gained from this project can inform future developments and partnerships and share future platforms related to robotics.
+
+ **Project Summary**
+
+ This project challenges students to design, build, and evaluate a human-centric robotic system for urban deployment using Arm-based compute platforms such as Raspberry Pi 5, NVIDIA Jetson Orin Nano, etc. The primary focus is on deploying prototypes in a controlled campus environment for applications like last-mile delivery, eldercare assistance, or smart waste collection.
+
+
+ Participants will integrate real-time navigation, object manipulation, and human-interaction modules using state-of-the-art computer vision and sensor fusion frameworks. The second phase involves simulating or evaluating the robot’s impact on urban workflows and labor markets, including surveys or socioeconomic modeling techniques (e.g., system dynamics or agent-based simulation).
+
+ Potential Deliverables include:
+ - A working prototype running on an Arm-based platform
+ - Software stack (navigation, ML inference, interaction logic)
+ - Field evaluation results & UX data (e.g., survey or usage logs)
+ - Report of development process and considerations when prototyping an end-user product.
+ - A socioeconomic impact report using modeling or simulation techniques
+
+ *Note: Arm does not offer direct channels to municipalities or public testing environments. Projects should focus on campus deployments, simulated environments (e.g., Gazebo).*
+
+ ## Estimated Project Duration
+
+ 6+ months
+ Team size: 2+ participants
+
+ ## Prerequisites
+
+ - **Languages**: Familiarity with an OOP language.
+ - **Hardware**:
+ - **IP/Cloud Access**:
+ - Any cloud service provider with Arm-based instances (for model training or data analysis)
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+Reach out to Arm at [education@arm.com](mailto:education@arm.com) if you'd like to participate in this challenge.
+
+## Description
+
+**Why this is important?**
+
+Arm is transitioning from traditional IP to providing platforms for the AI era, please see a recent [news post](https://newsroom.arm.com/news/new-arm-product-naming-architecture) for more details. This project is crucial for Arm as it showcases the versatility of Arm-based platforms in real-world applications, enhancing their relevance in the robotics sector. Furthermore, the insights gained from this project can inform future developments and partnerships and share future platforms related to robotics.
+
+**Project Summary**
+
+This project challenges students to design, build, and evaluate a human-centric robotic system for urban deployment using Arm-based compute platforms such as Raspberry Pi 5, NVIDIA Jetson Orin Nano, etc. The primary focus is on deploying prototypes in a controlled campus environment for applications like last-mile delivery, eldercare assistance, or smart waste collection.
+
+
+Participants will integrate real-time navigation, object manipulation, and human-interaction modules using state-of-the-art computer vision and sensor fusion frameworks. The second phase involves simulating or evaluating the robot’s impact on urban workflows and labor markets, including surveys or socioeconomic modeling techniques (e.g., system dynamics or agent-based simulation).
+
+Potential Deliverables include:
+- A working prototype running on an Arm-based platform
+- Software stack (navigation, ML inference, interaction logic)
+- Field evaluation results & UX data (e.g., survey or usage logs)
+- Report of development process and considerations when prototyping an end-user product.
+- A socioeconomic impact report using modeling or simulation techniques
+
+*Note: Arm does not offer direct channels to municipalities or public testing environments. Projects should focus on campus deployments, simulated environments (e.g., Gazebo).*
+
+## Estimated Project Duration
+
+6+ months
+Team size: 2+ participants
+
+## Prerequisites
+
+- **Languages**: Familiarity with an OOP language.
+- **Hardware**:
+- **IP/Cloud Access**:
+ - Any cloud service provider with Arm-based instances (for model training or data analysis)
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md b/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md
index 56322c58..4dc49866 100644
--- a/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md
+++ b/docs/_posts/2025-05-30-LLM-Benchmark-on-Arm-Server.md
@@ -1,77 +1,80 @@
----
-title: LLM Benchmark for Arm Server
-description: This self-service project sets up a reproducible MLPerf Inference workflow to benchmark large-language-model performance across Arm server configurations—yielding hard data that guides optimization of Arm hardware and software stacks for AI workloads.
-subjects:
-- ML
-- Performance and Architecture
-requires-team:
-- No
-platform:
-- Servers and Cloud Computing
-- Laptops and Desktops
-- AI
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Hidden
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
- ## Description
- This project aims to benchmark inference on Arm-based servers using the MLPerf Inference benchmark suite. The project spans performance analysis across different configurations of Arm-based servers. The main deliverable is a comprehensive benchmarking setup that can evaluate the performance of large language models (LLMs) on various Arm server configurations in addition to a report highlighting the performance difference and how to recreate the results. This project will provide practical experience in benchmarking, performance analysis, and working with Arm-based server architectures. The final output will be a detailed report and a functional benchmarking infrastructure that can be used for further research and development.
-
-
- ## Prequisites
-
- - Intermediate understanding of Python and C++
- - Intemediate understanding of ML frameworks such as MLPerf, TensorFlow and PyTorch
- - Access to physcial Arm-based server or access to cloud service providers
-
- ## Resources from Arm and our partners
-
- - Repository: [MLPerf Inference ](https://github.com/mlcommons/inference)
- - External Documentation: [MLPerf Inference Benchmark Suite](https://mlcommons.org/en/inference-datacenter-20/)
- - Blog: [Arm Server inference performance](https://community.arm.com/arm-community-blogs/b/servers-and-cloud-computing-blog/posts/machine-learning-inference-on-aws-graviton3)
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-## Description
-This project aims to benchmark inference on Arm-based servers using the MLPerf Inference benchmark suite. The project spans performance analysis across different configurations of Arm-based servers. The main deliverable is a comprehensive benchmarking setup that can evaluate the performance of large language models (LLMs) on various Arm server configurations in addition to a report highlighting the performance difference and how to recreate the results. This project will provide practical experience in benchmarking, performance analysis, and working with Arm-based server architectures. The final output will be a detailed report and a functional benchmarking infrastructure that can be used for further research and development.
-
-
-## Prequisites
-
-- Intermediate understanding of Python and C++
-- Intemediate understanding of ML frameworks such as MLPerf, TensorFlow and PyTorch
-- Access to physcial Arm-based server or access to cloud service providers
-
-## Resources from Arm and our partners
-
-- Repository: [MLPerf Inference ](https://github.com/mlcommons/inference)
-- External Documentation: [MLPerf Inference Benchmark Suite](https://mlcommons.org/en/inference-datacenter-20/)
-- Blog: [Arm Server inference performance](https://community.arm.com/arm-community-blogs/b/servers-and-cloud-computing-blog/posts/machine-learning-inference-on-aws-graviton3)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: LLM-Benchmark-on-Arm-Server
+description: This self-service project sets up a reproducible MLPerf Inference workflow to benchmark large-language-model performance across Arm server configurations—yielding hard data that guides optimization of Arm hardware and software stacks for AI workloads.
+subjects:
+- ML
+- Performance and Architecture
+requires-team:
+- No
+platform:
+- Servers and Cloud Computing
+- Laptops and Desktops
+- AI
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Hidden
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+ ## Description
+ This project aims to benchmark inference on Arm-based servers using the MLPerf Inference benchmark suite. The project spans performance analysis across different configurations of Arm-based servers. The main deliverable is a comprehensive benchmarking setup that can evaluate the performance of large language models (LLMs) on various Arm server configurations in addition to a report highlighting the performance difference and how to recreate the results. This project will provide practical experience in benchmarking, performance analysis, and working with Arm-based server architectures. The final output will be a detailed report and a functional benchmarking infrastructure that can be used for further research and development.
+
+
+ ## Prerequisites
+
+ - Intermediate understanding of Python and C++
+ - Intermediate understanding of ML frameworks such as MLPerf, TensorFlow and PyTorch
+ - Access to physical Arm-based server or access to cloud service providers
+
+ ## Resources from Arm and our partners
+
+ - Repository: [MLPerf Inference ](https://github.com/mlcommons/inference)
+ - External Documentation: [MLPerf Inference Benchmark Suite](https://mlcommons.org/en/inference-datacenter-20/)
+ - Blog: [Arm Server inference performance](https://community.arm.com/arm-community-blogs/b/servers-and-cloud-computing-blog/posts/machine-learning-inference-on-aws-graviton3)
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+## Description
+This project aims to benchmark inference on Arm-based servers using the MLPerf Inference benchmark suite. The project spans performance analysis across different configurations of Arm-based servers. The main deliverable is a comprehensive benchmarking setup that can evaluate the performance of large language models (LLMs) on various Arm server configurations in addition to a report highlighting the performance difference and how to recreate the results. This project will provide practical experience in benchmarking, performance analysis, and working with Arm-based server architectures. The final output will be a detailed report and a functional benchmarking infrastructure that can be used for further research and development.
+
+
+## Prerequisites
+
+- Intermediate understanding of Python and C++
+- Intermediate understanding of ML frameworks such as MLPerf, TensorFlow and PyTorch
+- Access to physical Arm-based server or access to cloud service providers
+
+## Resources from Arm and our partners
+
+- Repository: [MLPerf Inference ](https://github.com/mlcommons/inference)
+- External Documentation: [MLPerf Inference Benchmark Suite](https://mlcommons.org/en/inference-datacenter-20/)
+- Blog: [Arm Server inference performance](https://community.arm.com/arm-community-blogs/b/servers-and-cloud-computing-blog/posts/machine-learning-inference-on-aws-graviton3)
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md b/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md
index c1ce0828..6bea1cd0 100644
--- a/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md
+++ b/docs/_posts/2025-05-30-Machine-Learning-on-AWS-Graviton.md
@@ -1,111 +1,112 @@
----
-title: Efficient Inference of text-to-video (OpenSora) on AWS Graviton Instances
-description: This self-service project ports and tunes OpenSora text-to-video transformers on AWS Graviton CPUs—showcasing cost-efficient, quantized, CPU-only inference pipelines and guiding best-practice optimization for Arm-based cloud AI workloads.
-subjects:
-- ML
-- Migration to Arm
-- Performance and Architecture
-requires-team:
-- No
-platform:
-- Servers and Cloud Computing
-- AI
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
-
-
- ## Description
-
- **Why is this important?**
-
- This project investigates the deployment and optimization of text-to-video transformer models on Arm-based instances, leveraging CPU-only execution for cost-effective and scalable inference. Vision Transformers, though typically run on GPUs, are increasingly desire to operate in resource-constrained environments for power efficiency.
-
-
- **Project Summary**
-
- The aim of this project is to port, benchmark, and optimize a pre-trained ViT model (e.g., OpenSora) on Arm-based instances. This could include post-training quantization and investigation in how to speed up performance. Students will explore efficiency techniques such as INT8 quantization, refactoring of expensive operations, and memory-efficient transformer kernels, and compare results across GPU and CPU platforms. Deliverables include a reproducable inference pipeline and a technical report outlining bottlenecks and optimization strategies.
-
- ## Prequisites
-
- - Intemediate understanding of Python.
- - Understanding of transformer architectures, vision transformer architectures and inference optimization
- - Experience using PyTorch or ONNX Runtime (CPU execution provider)
- - Experience with libraries such as Hugging Face Transformers, torchvision
- - Access to Arm-based instances such as AWS Graviton3/Graviton4 (`c7g`, `m7g`, or `r7g`)
- - Familiarity with Linux, Docker, and cloud environments
-
-
- ## Resources from Arm and our partners
-
-
- - Learning Paths: [Arm AI Learning Paths](https://learn.arm.com/tag/ml)
- - Repository: [AWS Machine Learning Guide](https://github.com/aws/aws-graviton-getting-started/tree/main/machinelearning)
- - Blog: [AWS SageMaker](https://aws.amazon.com/blogs/machine-learning/run-machine-learning-inference-workloads-on-aws-graviton-based-instances-with-amazon-sagemaker/)
- - External Documentation: [OpenSora Documentation](https://github.com/hpcaitech/Open-Sora)
- - Repository: [GGML library](https://github.com/ggml-org/ggml)
-
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-## Description
-
-**Why is this important?**
-
-This project investigates the deployment and optimization of text-to-video transformer models on Arm-based instances, leveraging CPU-only execution for cost-effective and scalable inference. Vision Transformers, though typically run on GPUs, are increasingly desire to operate in resource-constrained environments for power efficiency.
-
-
-**Project Summary**
-
-The aim of this project is to port, benchmark, and optimize a pre-trained ViT model (e.g., OpenSora) on Arm-based instances. This could include post-training quantization and investigation in how to speed up performance. Students will explore efficiency techniques such as INT8 quantization, refactoring of expensive operations, and memory-efficient transformer kernels, and compare results across GPU and CPU platforms. Deliverables include a reproducable inference pipeline and a technical report outlining bottlenecks and optimization strategies.
-
-## Prequisites
-
-- Intemediate understanding of Python.
-- Understanding of transformer architectures, vision transformer architectures and inference optimization
-- Experience using PyTorch or ONNX Runtime (CPU execution provider)
-- Experience with libraries such as Hugging Face Transformers, torchvision
-- Access to Arm-based instances such as AWS Graviton3/Graviton4 (`c7g`, `m7g`, or `r7g`)
-- Familiarity with Linux, Docker, and cloud environments
-
-
-## Resources from Arm and our partners
-
-
-- Learning Paths: [Arm AI Learning Paths](https://learn.arm.com/tag/ml)
-- Repository: [AWS Machine Learning Guide](https://github.com/aws/aws-graviton-getting-started/tree/main/machinelearning)
-- Blog: [AWS SageMaker](https://aws.amazon.com/blogs/machine-learning/run-machine-learning-inference-workloads-on-aws-graviton-based-instances-with-amazon-sagemaker/)
-- External Documentation: [OpenSora Documentation](https://github.com/hpcaitech/Open-Sora)
-- Repository: [GGML library](https://github.com/ggml-org/ggml)
-
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Machine-Learning-on-AWS-Graviton
+description: This self-service project ports and tunes OpenSora text-to-video transformers on AWS Graviton CPUs—showcasing cost-efficient, quantized, CPU-only inference pipelines and guiding best-practice optimization for Arm-based cloud AI workloads.
+subjects:
+- ML
+- Migration to Arm
+- Performance and Architecture
+requires-team:
+- No
+platform:
+- Servers and Cloud Computing
+- AI
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+ ## Description
+
+ **Why is this important?**
+
+ This project investigates the deployment and optimization of text-to-video transformer models on Arm-based instances, leveraging CPU-only execution for cost-effective and scalable inference. Vision Transformers, though typically run on GPUs, are increasingly desired to operate in resource-constrained environments for power efficiency.
+
+
+ **Project Summary**
+
+ The aim of this project is to port, benchmark, and optimize a pre-trained ViT model (e.g., OpenSora) on Arm-based instances. This could include post-training quantization and investigation in how to speed up performance. Students will explore efficiency techniques such as INT8 quantization, refactoring of expensive operations, and memory-efficient transformer kernels, and compare results across GPU and CPU platforms. Deliverables include a reproducible inference pipeline and a technical report outlining bottlenecks and optimization strategies.
+
+ ## Prerequisites
+
+ - Intermediate understanding of Python.
+ - Understanding of transformer architectures, vision transformer architectures and inference optimization
+ - Experience using PyTorch or ONNX Runtime (CPU execution provider)
+ - Experience with libraries such as Hugging Face Transformers, torchvision
+ - Access to Arm-based instances such as AWS Graviton3/Graviton4 (`c7g`, `m7g`, or `r7g`)
+ - Familiarity with Linux, Docker, and cloud environments
+
+
+ ## Resources from Arm and our partners
+
+
+ - Learning Paths: [Arm AI Learning Paths](https://learn.arm.com/tag/ml)
+ - Repository: [AWS Machine Learning Guide](https://github.com/aws/aws-graviton-getting-started/tree/main/machinelearning)
+ - Blog: [AWS SageMaker](https://aws.amazon.com/blogs/machine-learning/run-machine-learning-inference-workloads-on-aws-graviton-based-instances-with-amazon-sagemaker/)
+ - External Documentation: [OpenSora Documentation](https://github.com/hpcaitech/Open-Sora)
+ - Repository: [GGML library](https://github.com/ggml-org/ggml)
+
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+## Description
+
+**Why is this important?**
+
+This project investigates the deployment and optimization of text-to-video transformer models on Arm-based instances, leveraging CPU-only execution for cost-effective and scalable inference. Vision Transformers, though typically run on GPUs, are increasingly desired to operate in resource-constrained environments for power efficiency.
+
+
+**Project Summary**
+
+The aim of this project is to port, benchmark, and optimize a pre-trained ViT model (e.g., OpenSora) on Arm-based instances. This could include post-training quantization and investigation in how to speed up performance. Students will explore efficiency techniques such as INT8 quantization, refactoring of expensive operations, and memory-efficient transformer kernels, and compare results across GPU and CPU platforms. Deliverables include a reproducible inference pipeline and a technical report outlining bottlenecks and optimization strategies.
+
+## Prerequisites
+
+- Intermediate understanding of Python.
+- Understanding of transformer architectures, vision transformer architectures and inference optimization
+- Experience using PyTorch or ONNX Runtime (CPU execution provider)
+- Experience with libraries such as Hugging Face Transformers, torchvision
+- Access to Arm-based instances such as AWS Graviton3/Graviton4 (`c7g`, `m7g`, or `r7g`)
+- Familiarity with Linux, Docker, and cloud environments
+
+
+## Resources from Arm and our partners
+
+
+- Learning Paths: [Arm AI Learning Paths](https://learn.arm.com/tag/ml)
+- Repository: [AWS Machine Learning Guide](https://github.com/aws/aws-graviton-getting-started/tree/main/machinelearning)
+- Blog: [AWS SageMaker](https://aws.amazon.com/blogs/machine-learning/run-machine-learning-inference-workloads-on-aws-graviton-based-instances-with-amazon-sagemaker/)
+- External Documentation: [OpenSora Documentation](https://github.com/hpcaitech/Open-Sora)
+- Repository: [GGML library](https://github.com/ggml-org/ggml)
+
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md b/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md
index fc7734d1..932418f5 100644
--- a/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md
+++ b/docs/_posts/2025-05-30-Processor-in-the-Loop-Automotive.md
@@ -1,121 +1,122 @@
----
-title: Processor in the Loop Automotive Controller on an Arm Cortex M7 Fast Model
-description: Verify a Simulink automotive controller by running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor.
-subjects:
-- Embedded Linux
-- RTOS Fundamentals
-- Virtual Hardware
-requires-team:
-- No
-platform:
-- Laptops and Desktops
-- Automotive
-- Embedded and Microcontrollers
-sw-hw:
-- Software
-- Hardware
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
-
-
-
- ## Description
-
- **Why this is important**
-
- Modern automotive software development requires early verification of embedded software to meet performance and safety standards. Engineers must validate control algorithms long before physical ECUs are available. Processor in the Loop (PIL) verification bridges this gap by executing auto-generated embedded C code on a virtual processor while checking for functional equivalence and timing compliance. This challenge mirrors the real-world V-Model verification process used by OEMs and Tier 1 suppliers, offering hands-on experience in model-based design, embedded code generation, and in-the-loop testing on a virtual Arm Cortex M7 core.Modern vehicles demand rigorous, early-stage verification of embedded software to satisfy safety, performance and homologation requirements. Waiting for physical ECUs delays feedback and drives up cost; virtual processor testing closes this gap.
-
- **Project summary**
-
- Start with a prebuilt Simulink automotive control model and drive it through a complete model-based development and verification workflow. This includes defining detailed software requirements, designing test scenarios, generating C code from the controller subsystem, running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor, analyzing execution time, and publishing a complete verification report.
-
- ## Prequisites
-
- - [MATLAB & Simulink License](https://uk.mathworks.com/pricing-licensing.html?prodcode=ML&intendeduse=edu)
- - Familiarity with C/C++, Simulink, Stateflow and Embedded Coder
- - Familiarity with Processor-in-the-Loop (PIL), Code Profile Analyzer
- - Understanding of automotive software development such as V-Model lifecycle methodology.
-
-
- ## Resources from Arm and our partners
-
- - Built-in Simulink Automotive Example: [Automatic Climate Control](https://www.mathworks.com/help/simulink/slref/simulating-automatic-climate-control-systems.html)
- - Built-in Simulink Automotive Example: [Tire Pressure Monitoring System (TPMS)]( https://www.mathworks.com/help/simulink/ug/wirelesss-tire-pressure-monitoring-system-with-fault-logging.html)
- - Built-in Simulink Automotive Example: [Anti-Lock Braking System (ABS)]( https://www.mathworks.com/help/simulink/slref/modeling-an-anti-lock-braking-system.html)
- - Define Requirements: [Requirements Toolbox™](https://www.mathworks.com/products/requirements-toolbox.html)
- - Code Generation: [MathWorks Embedded Coder®](https://uk.mathworks.com/products/embedded-coder.html)
- - Model-in-the-Loop Test: [Simulink Test™](https://www.mathworks.com/help/sltest/index.html?s_tid=CRUX_lftnav)
- - Measure Code Coverage: [Simulink Coverage™](https://www.mathworks.com/help/slcoverage/index.html)
- - Hardware Implementation: [Arm Cortex M7 (Fast Model)](https://developer.arm.com/Tools%20and%20Software/Fast%20Models), [Embedded Coder Support Package for Arm Cortex M Fast Models]( https://www.mathworks.com/hardware-support/arm-cortex-m.html)
- - Conduct Execution Profiling: [Code Profile Analyzer](https://www.mathworks.com/help/ecoder/ref/codeprofileanalyzer-app.html)
- - Perform Static Code Analysis: [Polyspace®](https://www.mathworks.com/products/polyspace.html)
- - Documentation: [MATLAB and Simulink for Verification and Validation](https://www.mathworks.com/solutions/verification-validation.html)
- - Documentation: [ARM Cortex-M Support from Embedded Coder]( https://www.mathworks.com/hardware-support/arm-cortex-m.html)
- - Documentation: [Arm Fast Models](https://uk.mathworks.com/products/connections/product_detail/arm-fast-models.html)
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-
-## Description
-
-**Why this is important**
-
-Modern automotive software development requires early verification of embedded software to meet performance and safety standards. Engineers must validate control algorithms long before physical ECUs are available. Processor in the Loop (PIL) verification bridges this gap by executing auto-generated embedded C code on a virtual processor while checking for functional equivalence and timing compliance. This challenge mirrors the real-world V-Model verification process used by OEMs and Tier 1 suppliers, offering hands-on experience in model-based design, embedded code generation, and in-the-loop testing on a virtual Arm Cortex M7 core.Modern vehicles demand rigorous, early-stage verification of embedded software to satisfy safety, performance and homologation requirements. Waiting for physical ECUs delays feedback and drives up cost; virtual processor testing closes this gap.
-
-**Project summary**
-
-Start with a prebuilt Simulink automotive control model and drive it through a complete model-based development and verification workflow. This includes defining detailed software requirements, designing test scenarios, generating C code from the controller subsystem, running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor, analyzing execution time, and publishing a complete verification report.
-
-## Prequisites
-
-- [MATLAB & Simulink License](https://uk.mathworks.com/pricing-licensing.html?prodcode=ML&intendeduse=edu)
-- Familiarity with C/C++, Simulink, Stateflow and Embedded Coder
-- Familiarity with Processor-in-the-Loop (PIL), Code Profile Analyzer
-- Understanding of automotive software development such as V-Model lifecycle methodology.
-
-
-## Resources from Arm and our partners
-
-- Built-in Simulink Automotive Example: [Automatic Climate Control](https://www.mathworks.com/help/simulink/slref/simulating-automatic-climate-control-systems.html)
-- Built-in Simulink Automotive Example: [Tire Pressure Monitoring System (TPMS)]( https://www.mathworks.com/help/simulink/ug/wirelesss-tire-pressure-monitoring-system-with-fault-logging.html)
-- Built-in Simulink Automotive Example: [Anti-Lock Braking System (ABS)]( https://www.mathworks.com/help/simulink/slref/modeling-an-anti-lock-braking-system.html)
-- Define Requirements: [Requirements Toolbox™](https://www.mathworks.com/products/requirements-toolbox.html)
-- Code Generation: [MathWorks Embedded Coder®](https://uk.mathworks.com/products/embedded-coder.html)
-- Model-in-the-Loop Test: [Simulink Test™](https://www.mathworks.com/help/sltest/index.html?s_tid=CRUX_lftnav)
-- Measure Code Coverage: [Simulink Coverage™](https://www.mathworks.com/help/slcoverage/index.html)
-- Hardware Implementation: [Arm Cortex M7 (Fast Model)](https://developer.arm.com/Tools%20and%20Software/Fast%20Models), [Embedded Coder Support Package for Arm Cortex M Fast Models]( https://www.mathworks.com/hardware-support/arm-cortex-m.html)
-- Conduct Execution Profiling: [Code Profile Analyzer](https://www.mathworks.com/help/ecoder/ref/codeprofileanalyzer-app.html)
-- Perform Static Code Analysis: [Polyspace®](https://www.mathworks.com/products/polyspace.html)
-- Documentation: [MATLAB and Simulink for Verification and Validation](https://www.mathworks.com/solutions/verification-validation.html)
-- Documentation: [ARM Cortex-M Support from Embedded Coder]( https://www.mathworks.com/hardware-support/arm-cortex-m.html)
-- Documentation: [Arm Fast Models](https://uk.mathworks.com/products/connections/product_detail/arm-fast-models.html)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Processor-in-the-Loop-Automotive
+description: Verify a Simulink automotive controller by running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor.
+subjects:
+- Embedded Linux
+- RTOS Fundamentals
+- Virtual Hardware
+requires-team:
+- No
+platform:
+- Laptops and Desktops
+- Automotive
+- Embedded and Microcontrollers
+sw-hw:
+- Software
+- Hardware
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+
+ ## Description
+
+ **Why this is important**
+
+  Modern automotive software development requires early verification of embedded software to meet performance and safety standards. Engineers must validate control algorithms long before physical ECUs are available. Processor in the Loop (PIL) verification bridges this gap by executing auto-generated embedded C code on a virtual processor while checking for functional equivalence and timing compliance. This challenge mirrors the real-world V-Model verification process used by OEMs and Tier 1 suppliers, offering hands-on experience in model-based design, embedded code generation, and in-the-loop testing on a virtual Arm Cortex M7 core. Modern vehicles demand rigorous, early-stage verification of embedded software to satisfy safety, performance and homologation requirements. Waiting for physical ECUs delays feedback and drives up cost; virtual processor testing closes this gap.
+
+ **Project summary**
+
+ Start with a prebuilt Simulink automotive control model and drive it through a complete model-based development and verification workflow. This includes defining detailed software requirements, designing test scenarios, generating C code from the controller subsystem, running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor, analyzing execution time, and publishing a complete verification report.
+
+  ## Prerequisites
+
+ - [MATLAB & Simulink License](https://uk.mathworks.com/pricing-licensing.html?prodcode=ML&intendeduse=edu)
+ - Familiarity with C/C++, Simulink, Stateflow and Embedded Coder
+ - Familiarity with Processor-in-the-Loop (PIL), Code Profile Analyzer
+ - Understanding of automotive software development such as V-Model lifecycle methodology.
+
+
+ ## Resources from Arm and our partners
+
+ - Built-in Simulink Automotive Example: [Automatic Climate Control](https://www.mathworks.com/help/simulink/slref/simulating-automatic-climate-control-systems.html)
+ - Built-in Simulink Automotive Example: [Tire Pressure Monitoring System (TPMS)]( https://www.mathworks.com/help/simulink/ug/wirelesss-tire-pressure-monitoring-system-with-fault-logging.html)
+ - Built-in Simulink Automotive Example: [Anti-Lock Braking System (ABS)]( https://www.mathworks.com/help/simulink/slref/modeling-an-anti-lock-braking-system.html)
+ - Define Requirements: [Requirements Toolbox™](https://www.mathworks.com/products/requirements-toolbox.html)
+ - Code Generation: [MathWorks Embedded Coder®](https://uk.mathworks.com/products/embedded-coder.html)
+ - Model-in-the-Loop Test: [Simulink Test™](https://www.mathworks.com/help/sltest/index.html?s_tid=CRUX_lftnav)
+ - Measure Code Coverage: [Simulink Coverage™](https://www.mathworks.com/help/slcoverage/index.html)
+ - Hardware Implementation: [Arm Cortex M7 (Fast Model)](https://developer.arm.com/Tools%20and%20Software/Fast%20Models), [Embedded Coder Support Package for Arm Cortex M Fast Models]( https://www.mathworks.com/hardware-support/arm-cortex-m.html)
+ - Conduct Execution Profiling: [Code Profile Analyzer](https://www.mathworks.com/help/ecoder/ref/codeprofileanalyzer-app.html)
+ - Perform Static Code Analysis: [Polyspace®](https://www.mathworks.com/products/polyspace.html)
+ - Documentation: [MATLAB and Simulink for Verification and Validation](https://www.mathworks.com/solutions/verification-validation.html)
+ - Documentation: [ARM Cortex-M Support from Embedded Coder]( https://www.mathworks.com/hardware-support/arm-cortex-m.html)
+ - Documentation: [Arm Fast Models](https://uk.mathworks.com/products/connections/product_detail/arm-fast-models.html)
+
+ ## Support Level
+
+  This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+  Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+  To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally, if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+
+## Description
+
+**Why this is important**
+
+Modern automotive software development requires early verification of embedded software to meet performance and safety standards. Engineers must validate control algorithms long before physical ECUs are available. Processor in the Loop (PIL) verification bridges this gap by executing auto-generated embedded C code on a virtual processor while checking for functional equivalence and timing compliance. This challenge mirrors the real-world V-Model verification process used by OEMs and Tier 1 suppliers, offering hands-on experience in model-based design, embedded code generation, and in-the-loop testing on a virtual Arm Cortex M7 core. Modern vehicles demand rigorous, early-stage verification of embedded software to satisfy safety, performance and homologation requirements. Waiting for physical ECUs delays feedback and drives up cost; virtual processor testing closes this gap.
+
+**Project summary**
+
+Start with a prebuilt Simulink automotive control model and drive it through a complete model-based development and verification workflow. This includes defining detailed software requirements, designing test scenarios, generating C code from the controller subsystem, running processor-in-the-loop (PIL) tests on a virtual Arm Cortex M7 processor, analyzing execution time, and publishing a complete verification report.
+
+## Prerequisites
+
+- [MATLAB & Simulink License](https://uk.mathworks.com/pricing-licensing.html?prodcode=ML&intendeduse=edu)
+- Familiarity with C/C++, Simulink, Stateflow and Embedded Coder
+- Familiarity with Processor-in-the-Loop (PIL), Code Profile Analyzer
+- Understanding of automotive software development such as V-Model lifecycle methodology.
+
+
+## Resources from Arm and our partners
+
+- Built-in Simulink Automotive Example: [Automatic Climate Control](https://www.mathworks.com/help/simulink/slref/simulating-automatic-climate-control-systems.html)
+- Built-in Simulink Automotive Example: [Tire Pressure Monitoring System (TPMS)]( https://www.mathworks.com/help/simulink/ug/wirelesss-tire-pressure-monitoring-system-with-fault-logging.html)
+- Built-in Simulink Automotive Example: [Anti-Lock Braking System (ABS)]( https://www.mathworks.com/help/simulink/slref/modeling-an-anti-lock-braking-system.html)
+- Define Requirements: [Requirements Toolbox™](https://www.mathworks.com/products/requirements-toolbox.html)
+- Code Generation: [MathWorks Embedded Coder®](https://uk.mathworks.com/products/embedded-coder.html)
+- Model-in-the-Loop Test: [Simulink Test™](https://www.mathworks.com/help/sltest/index.html?s_tid=CRUX_lftnav)
+- Measure Code Coverage: [Simulink Coverage™](https://www.mathworks.com/help/slcoverage/index.html)
+- Hardware Implementation: [Arm Cortex M7 (Fast Model)](https://developer.arm.com/Tools%20and%20Software/Fast%20Models), [Embedded Coder Support Package for Arm Cortex M Fast Models]( https://www.mathworks.com/hardware-support/arm-cortex-m.html)
+- Conduct Execution Profiling: [Code Profile Analyzer](https://www.mathworks.com/help/ecoder/ref/codeprofileanalyzer-app.html)
+- Perform Static Code Analysis: [Polyspace®](https://www.mathworks.com/products/polyspace.html)
+- Documentation: [MATLAB and Simulink for Verification and Validation](https://www.mathworks.com/solutions/verification-validation.html)
+- Documentation: [ARM Cortex-M Support from Embedded Coder]( https://www.mathworks.com/hardware-support/arm-cortex-m.html)
+- Documentation: [Arm Fast Models](https://uk.mathworks.com/products/connections/product_detail/arm-fast-models.html)
+
+## Support Level
+
+This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Quantisation-Aware-Training.md b/docs/_posts/2025-05-30-Quantisation-Aware-Training.md
index dc797025..52de2ef9 100644
--- a/docs/_posts/2025-05-30-Quantisation-Aware-Training.md
+++ b/docs/_posts/2025-05-30-Quantisation-Aware-Training.md
@@ -1,106 +1,107 @@
----
-title: 'Quantization-Aware Training for Mobile Deployment: Deploying Lightweight Models on Arm'
-description: This self-service project applies PyTorch quantization-aware training to compress and accelerate vision models for Arm-powered Android devices—enabling real-time, on-device AI while sharing the resulting lightweight models with the Hugging Face community.
-subjects:
-- ML
-- Performance and Architecture
-requires-team:
-- No
-platform:
-- Servers and Cloud Computing
-- Laptops and Desktops
-- Mobile, Graphics, and Gaming
-- AI
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Hidden
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
- ## Description
-
- This open-ended project invites students to explore **Quantization-Aware Training (QAT)** with PyTorch to optimize computer vision models for **Arm-based mobile devices** (e.g., Android smartphones).
-
- The project centers on training a model using a **non-restrictively licensed dataset** and deploying it either on **Arm-powered mobile devices** (leveraging Android Neural Networks API )
-
- Students will apply QAT to maintain accuracy while reducing model size and inference latency, making it suitable for real-time applications like:
- - Sign language recognition for accessibility.
- - Visual anomaly detection in manufacturing.
- - Personal health and activity monitoring from camera feeds.
-
- The project encourages referencing work by contributing **optimized and quantized models for Arm platforms** on HuggingFace. The final quantized model will be **uploaded to HuggingFace** and may be submitted for listing in the [Arm on HuggingFace space](https://huggingface.co/Arm), encouraging open, community-supported contributions.
-
- ## Prequisites
-
- - **Languages**: Familiar with Python, pytorch and Java/Kotlin (if Android).
- - **Frameworks**: Intermediate understanding of PyTorch
- - **Tooling**: PyTorch Lightning, Android Studio
- - **Hardware Options**:
- - Android phone with Arm Cortex-A CPU or simulator through Android Studio.
- - **Deployment Targets**:
- - Android
-
- ## Resources from Arm and our partners
-
- - Documentation: [Quantization in PyTorch](https://pytorch.org/docs/stable/quantization.html)
- - Blog: [Google Media Pipe](https://android-developers.googleblog.com/2024/10/bring-your-ai-model-to-android-devices.html)
- - Datasets: [Hugging Face Datasets](https://huggingface.co/docs/datasets/en/index)
- - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-## Description
-
-This open-ended project invites students to explore **Quantization-Aware Training (QAT)** with PyTorch to optimize computer vision models for **Arm-based mobile devices** (e.g., Android smartphones).
-
-The project centers on training a model using a **non-restrictively licensed dataset** and deploying it either on **Arm-powered mobile devices** (leveraging Android Neural Networks API )
-
-Students will apply QAT to maintain accuracy while reducing model size and inference latency, making it suitable for real-time applications like:
-- Sign language recognition for accessibility.
-- Visual anomaly detection in manufacturing.
-- Personal health and activity monitoring from camera feeds.
-
-The project encourages referencing work by contributing **optimized and quantized models for Arm platforms** on HuggingFace. The final quantized model will be **uploaded to HuggingFace** and may be submitted for listing in the [Arm on HuggingFace space](https://huggingface.co/Arm), encouraging open, community-supported contributions.
-
-## Prequisites
-
-- **Languages**: Familiar with Python, pytorch and Java/Kotlin (if Android).
-- **Frameworks**: Intermediate understanding of PyTorch
-- **Tooling**: PyTorch Lightning, Android Studio
-- **Hardware Options**:
- - Android phone with Arm Cortex-A CPU or simulator through Android Studio.
-- **Deployment Targets**:
- - Android
-
-## Resources from Arm and our partners
-
-- Documentation: [Quantization in PyTorch](https://pytorch.org/docs/stable/quantization.html)
-- Blog: [Google Media Pipe](https://android-developers.googleblog.com/2024/10/bring-your-ai-model-to-android-devices.html)
-- Datasets: [Hugging Face Datasets](https://huggingface.co/docs/datasets/en/index)
-- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Quantisation-Aware-Training
+description: This self-service project applies PyTorch quantization-aware training to compress and accelerate vision models for Arm-powered Android devices—enabling real-time, on-device AI while sharing the resulting lightweight models with the Hugging Face community.
+subjects:
+- ML
+- Performance and Architecture
+requires-team:
+- No
+platform:
+- Servers and Cloud Computing
+- Laptops and Desktops
+- Mobile, Graphics, and Gaming
+- AI
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Hidden
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+ ## Description
+
+ This open-ended project invites students to explore **Quantization-Aware Training (QAT)** with PyTorch to optimize computer vision models for **Arm-based mobile devices** (e.g., Android smartphones).
+
+  The project centers on training a model using a **non-restrictively licensed dataset** and deploying it on **Arm-powered mobile devices** (leveraging the Android Neural Networks API).
+
+ Students will apply QAT to maintain accuracy while reducing model size and inference latency, making it suitable for real-time applications like:
+ - Sign language recognition for accessibility.
+ - Visual anomaly detection in manufacturing.
+ - Personal health and activity monitoring from camera feeds.
+
+ The project encourages referencing work by contributing **optimized and quantized models for Arm platforms** on HuggingFace. The final quantized model will be **uploaded to HuggingFace** and may be submitted for listing in the [Arm on HuggingFace space](https://huggingface.co/Arm), encouraging open, community-supported contributions.
+
+  ## Prerequisites
+
+ - **Languages**: Familiar with Python, pytorch and Java/Kotlin (if Android).
+ - **Frameworks**: Intermediate understanding of PyTorch
+ - **Tooling**: PyTorch Lightning, Android Studio
+ - **Hardware Options**:
+ - Android phone with Arm Cortex-A CPU or simulator through Android Studio.
+ - **Deployment Targets**:
+ - Android
+
+ ## Resources from Arm and our partners
+
+ - Documentation: [Quantization in PyTorch](https://pytorch.org/docs/stable/quantization.html)
+ - Blog: [Google Media Pipe](https://android-developers.googleblog.com/2024/10/bring-your-ai-model-to-android-devices.html)
+ - Datasets: [Hugging Face Datasets](https://huggingface.co/docs/datasets/en/index)
+ - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
+
+ ## Support Level
+
+  This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+  Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+  To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally, if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+## Description
+
+This open-ended project invites students to explore **Quantization-Aware Training (QAT)** with PyTorch to optimize computer vision models for **Arm-based mobile devices** (e.g., Android smartphones).
+
+The project centers on training a model using a **non-restrictively licensed dataset** and deploying it on **Arm-powered mobile devices** (leveraging the Android Neural Networks API).
+
+Students will apply QAT to maintain accuracy while reducing model size and inference latency, making it suitable for real-time applications like:
+- Sign language recognition for accessibility.
+- Visual anomaly detection in manufacturing.
+- Personal health and activity monitoring from camera feeds.
+
+The project encourages referencing work by contributing **optimized and quantized models for Arm platforms** on HuggingFace. The final quantized model will be **uploaded to HuggingFace** and may be submitted for listing in the [Arm on HuggingFace space](https://huggingface.co/Arm), encouraging open, community-supported contributions.
+
+## Prerequisites
+
+- **Languages**: Familiar with Python, pytorch and Java/Kotlin (if Android).
+- **Frameworks**: Intermediate understanding of PyTorch
+- **Tooling**: PyTorch Lightning, Android Studio
+- **Hardware Options**:
+ - Android phone with Arm Cortex-A CPU or simulator through Android Studio.
+- **Deployment Targets**:
+ - Android
+
+## Resources from Arm and our partners
+
+- Documentation: [Quantization in PyTorch](https://pytorch.org/docs/stable/quantization.html)
+- Blog: [Google Media Pipe](https://android-developers.googleblog.com/2024/10/bring-your-ai-model-to-android-devices.html)
+- Datasets: [Hugging Face Datasets](https://huggingface.co/docs/datasets/en/index)
+- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
+
+## Support Level
+
+This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-R-Arm-Community-Support.md b/docs/_posts/2025-05-30-R-Arm-Community-Support.md
index 6b092ae6..b3d2576c 100644
--- a/docs/_posts/2025-05-30-R-Arm-Community-Support.md
+++ b/docs/_posts/2025-05-30-R-Arm-Community-Support.md
@@ -1,138 +1,141 @@
----
-title: Improving R Support for the Windows on Arm Community
-description: This self-service project boosts the R ecosystem on Windows on Arm by identifying unsupported packages, upstreaming fixes, and automating builds—so data scientists can run their workflows natively on fast, efficient Arm64 laptops and desktops.
-subjects:
-- Performance and Architecture
-- Migration to Arm
-- Libraries
-requires-team:
-- No
-platform:
-- Laptops and Desktops
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
-
-
-
- ## Description
-
- **Why this is important?**
-
- Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, developers will expect packages to be available for WoA so that downstream applications can more easily build for WoA platforms.
-
- **Project summary**
-
-
- This project aims to significantly enhance the support for running **R packages on Windows 11 for Arm64 (WoA)** by identifying and contributing bug fixes / improvements to the relevant parts of the R community (e.g., CRAN/Bioconductor packages or even R Core / Rtools etc.). The project’s goals include:
-
-
- - **Identifying CRAN and Bioconductor packages** lacking Windows/Arm64 support.
- - **Proposing and testing patches upstream** for R packages that fail to build or run on WoA.
- - **Engaging with the R development community** via the Windows Special interest group, [R-SIG-windows](https://stat.ethz.ch/mailman/listinfo/r-sig-windows), [R-Package-Devel](https://stat.ethz.ch/mailman/listinfo/r-package-devel) mailing list or the informal [R Contributors Slack](https://contributor.r-project.org/slack) and following the [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) to submit high-quality patches.
- - **Reporting new issues**, requesting comments on proposed patches, documenting process via Bugzilla and reviewing existing issues through the [bug tracker](https://bugs.r-project.org/)
- - **Tracking CI coverage** for recently announced public preview of [Windows11-Arm64 GitHub-hosted runners](https://github.blog/changelog/2025-04-14-windows-arm64-hosted-runners-now-available-in-public-preview/) and potentially proposing GitHub Actions or GitLab CI templates to automate WoA builds.
-
- Stretch Objectives:
-
- - **Identifying, Analyzing and fixing compatibility issues** in base R and Rtools for the Windows/Arm64 environment. This may involve waiting for improved upstream support from GCC for Windows-AArch64. A summary and progress is [available here](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/28802842658/MinGW+GNU+Toolchain).
-
- The deliverables include:
-
- - Patches, request for comments and bug reports the highest impact packages
- - A curated list of packages with proposed WoA support status
- - A short technical write-up describing the contributions and challenges
-
- ## Prequisites
-
- - Intermediate understanding of the R language
- - Intermediate understanding of Rtools, Git and Docker for cross-compilation.
- - Basic understanding or willingness to learn Bugzilla, Windows 11 operating system and GitHub CI/CD.
- - Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments).
-
- ## Resources from Arm and our partners
-
- - Documentation: [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file)
- - Documentation: [R Bugzilla](https://bugs.r-project.org/)
- - Documentation: [Rtools for Windows](https://cran.r-project.org/bin/windows/Rtools/)
- - Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/)
- - Documentation: Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors , who are part of the Arm Developer program and the R community. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-
-## Description
-
-**Why this is important?**
-
-Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, developers will expect packages to be available for WoA so that downstream applications can more easily build for WoA platforms.
-
-**Project summary**
-
-
-This project aims to significantly enhance the support for running **R packages on Windows 11 for Arm64 (WoA)** by identifying and contributing bug fixes / improvements to the relevant parts of the R community (e.g., CRAN/Bioconductor packages or even R Core / Rtools etc.). The project’s goals include:
-
-
-- **Identifying CRAN and Bioconductor packages** lacking Windows/Arm64 support.
-- **Proposing and testing patches upstream** for R packages that fail to build or run on WoA.
-- **Engaging with the R development community** via the Windows Special interest group, [R-SIG-windows](https://stat.ethz.ch/mailman/listinfo/r-sig-windows), [R-Package-Devel](https://stat.ethz.ch/mailman/listinfo/r-package-devel) mailing list or the informal [R Contributors Slack](https://contributor.r-project.org/slack) and following the [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) to submit high-quality patches.
-- **Reporting new issues**, requesting comments on proposed patches, documenting process via Bugzilla and reviewing existing issues through the [bug tracker](https://bugs.r-project.org/)
-- **Tracking CI coverage** for recently announced public preview of [Windows11-Arm64 GitHub-hosted runners](https://github.blog/changelog/2025-04-14-windows-arm64-hosted-runners-now-available-in-public-preview/) and potentially proposing GitHub Actions or GitLab CI templates to automate WoA builds.
-
-Stretch Objectives:
-
-- **Identifying, Analyzing and fixing compatibility issues** in base R and Rtools for the Windows/Arm64 environment. This may involve waiting for improved upstream support from GCC for Windows-AArch64. A summary and progress is [available here](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/28802842658/MinGW+GNU+Toolchain).
-
-The deliverables include:
-
-- Patches, request for comments and bug reports the highest impact packages
-- A curated list of packages with proposed WoA support status
-- A short technical write-up describing the contributions and challenges
-
-## Prequisites
-
-- Intermediate understanding of the R language
-- Intermediate understanding of Rtools, Git and Docker for cross-compilation.
-- Basic understanding or willingness to learn Bugzilla, Windows 11 operating system and GitHub CI/CD.
-- Arm64 Windows device or Access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments).
-
-## Resources from Arm and our partners
-
-- Documentation: [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file)
-- Documentation: [R Bugzilla](https://bugs.r-project.org/)
-- Documentation: [Rtools for Windows](https://cran.r-project.org/bin/windows/Rtools/)
-- Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/)
-- Documentation: Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors , who are part of the Arm Developer program and the R community. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: R-Arm-Community-Support
+description: This self-service project boosts the R ecosystem on Windows on Arm by identifying unsupported packages, upstreaming fixes, and automating builds—so data scientists can run their workflows natively on fast, efficient Arm64 laptops and desktops.
+subjects:
+- Performance and Architecture
+- Migration to Arm
+- Libraries
+requires-team:
+- No
+platform:
+- Laptops and Desktops
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+
+ ## Description
+
+ **Why is this important?**
+
+ Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, developers will expect packages to be available for WoA so that downstream applications can more easily build for WoA platforms.
+
+ **Project summary**
+
+
+ This project aims to significantly enhance the support for running **R packages on Windows 11 for Arm64 (WoA)** by identifying and contributing bug fixes / improvements to the relevant parts of the R community (e.g., CRAN/Bioconductor packages or even R Core / Rtools etc.). The project’s goals include:
+
+
+ - **Identifying CRAN and Bioconductor packages** lacking Windows/Arm64 support.
+ - **Proposing and testing patches upstream** for R packages that fail to build or run on WoA.
+ - **Engaging with the R development community** via the Windows Special interest group, [R-SIG-windows](https://stat.ethz.ch/mailman/listinfo/r-sig-windows), [R-Package-Devel](https://stat.ethz.ch/mailman/listinfo/r-package-devel) mailing list or the informal [R Contributors Slack](https://contributor.r-project.org/slack) and following the [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) to submit high-quality patches.
+ - **Reporting new issues**, requesting comments on proposed patches, documenting process via Bugzilla and reviewing existing issues through the [bug tracker](https://bugs.r-project.org/)
+ - **Tracking CI coverage** for recently announced public preview of [Windows11-Arm64 GitHub-hosted runners](https://github.blog/changelog/2025-04-14-windows-arm64-hosted-runners-now-available-in-public-preview/) and potentially proposing GitHub Actions or GitLab CI templates to automate WoA builds.
+
+ Stretch Objectives:
+
+ - **Identifying, Analyzing and fixing compatibility issues** in base R and Rtools for the Windows/Arm64 environment. This may involve waiting for improved upstream support from GCC for Windows-AArch64. A summary and progress is [available here](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/28802842658/MinGW+GNU+Toolchain).
+
+ The deliverables include:
+
+ - Patches, request for comments and bug reports for the highest impact packages
+ - A curated list of packages with proposed WoA support status
+ - A short technical write-up describing the contributions and challenges
+
+ ## Prerequisites
+
+ - Intermediate understanding of the R language
+ - Intermediate understanding of Rtools, Git and Docker for cross-compilation.
+ - Basic understanding or willingness to learn Bugzilla, Windows 11 operating system and GitHub CI/CD.
+ - Arm64 Windows device or access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments).
+
+ ## Resources from Arm and our partners
+
+ - Documentation: [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file)
+ - Documentation: [R Bugzilla](https://bugs.r-project.org/)
+ - Documentation: [Rtools for Windows](https://cran.r-project.org/bin/windows/Rtools/)
+ - Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/)
+ - Documentation: Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program and the R community. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+
+## Description
+
+**Why is this important?**
+
+Since 2020, there has been notable growth in multi-core Arm-based laptops and desktops, including the recent launch of Windows on Arm (WOA). In the coming years, Arm anticipates an increase in available OEM (original equipment manufacturer) devices. As such, developers will expect packages to be available for WoA so that downstream applications can more easily build for WoA platforms.
+
+**Project summary**
+
+
+This project aims to significantly enhance the support for running **R packages on Windows 11 for Arm64 (WoA)** by identifying and contributing bug fixes / improvements to the relevant parts of the R community (e.g., CRAN/Bioconductor packages or even R Core / Rtools etc.). The project’s goals include:
+
+
+- **Identifying CRAN and Bioconductor packages** lacking Windows/Arm64 support.
+- **Proposing and testing patches upstream** for R packages that fail to build or run on WoA.
+- **Engaging with the R development community** via the Windows Special interest group, [R-SIG-windows](https://stat.ethz.ch/mailman/listinfo/r-sig-windows), [R-Package-Devel](https://stat.ethz.ch/mailman/listinfo/r-package-devel) mailing list or the informal [R Contributors Slack](https://contributor.r-project.org/slack) and following the [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file) to submit high-quality patches.
+- **Reporting new issues**, requesting comments on proposed patches, documenting process via Bugzilla and reviewing existing issues through the [bug tracker](https://bugs.r-project.org/)
+- **Tracking CI coverage** for recently announced public preview of [Windows11-Arm64 GitHub-hosted runners](https://github.blog/changelog/2025-04-14-windows-arm64-hosted-runners-now-available-in-public-preview/) and potentially proposing GitHub Actions or GitLab CI templates to automate WoA builds.
+
+Stretch Objectives:
+
+- **Identifying, Analyzing and fixing compatibility issues** in base R and Rtools for the Windows/Arm64 environment. This may involve waiting for improved upstream support from GCC for Windows-AArch64. A summary and progress is [available here](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/28802842658/MinGW+GNU+Toolchain).
+
+The deliverables include:
+
+- Patches, request for comments and bug reports for the highest impact packages
+- A curated list of packages with proposed WoA support status
+- A short technical write-up describing the contributions and challenges
+
+## Prerequisites
+
+- Intermediate understanding of the R language
+- Intermediate understanding of Rtools, Git and Docker for cross-compilation.
+- Basic understanding or willingness to learn Bugzilla, Windows 11 operating system and GitHub CI/CD.
+- Arm64 Windows device or access to virtualized WoA platforms via [Linaro’s Windows on Arm Environments](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments).
+
+## Resources from Arm and our partners
+
+- Documentation: [R Contribution Guide](https://github.com/r-devel/rdevguide?tab=readme-ov-file)
+- Documentation: [R Bugzilla](https://bugs.r-project.org/)
+- Documentation: [Rtools for Windows](https://cran.r-project.org/bin/windows/Rtools/)
+- Documentation: [Bioconductor Build Reports](https://bioconductor.org/checkResults/)
+- Documentation: Package installation results for [CRAN](https://www.r-project.org/nosvn/winutf8/ucrt3/CRAN_aarch64/install_out/) and [Bioconductor](https://www.r-project.org/nosvn/winutf8/ucrt3/BIOC_aarch64/install_out/) packages
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program and the R community. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Real-Time-Image-Classification.md b/docs/_posts/2025-05-30-Real-Time-Image-Classification.md
index 5e81dc6b..c1dd78ce 100644
--- a/docs/_posts/2025-05-30-Real-Time-Image-Classification.md
+++ b/docs/_posts/2025-05-30-Real-Time-Image-Classification.md
@@ -1,98 +1,99 @@
----
-title: Running Real-Time Image Classification on Arm Cortex-M with CMSIS-NN
-description: This self-service project trains, quantizes, and CMSIS-NN-deploys a CNN to achieve real-time image classification on an Arm Cortex-M board—demonstrating low-power, edge-ready AI on microcontrollers.
-subjects:
-- ML
-- Performance and Architecture
-requires-team:
-- No
-platform:
-- IoT
-- Embedded and Microcontrollers
-- AI
-sw-hw:
-- Software
-- Hardware
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Hidden
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
-
-
-
- ## Description
- This project aims to develop a real-time image classification system using Convolutional Neural Networks (CNN) on an Arm Cortex-M microcontroller with CMSIS-NN. The main deliverables include training a CNN model on a custom dataset, quantizing the model to deploy it on resource-constrained devices, and transforming the model into a C format to compile and run on the microcontroller. The project will provide practical experience in running AI models on edge devices and optimizing AI models for efficient performance. The final output will be a functional image classification system capable of real-time processing on a microcontroller.
-
-
- ## Prequisites
-
- - Languages: Python, ML framework experience (TensorFlowLite for microcontroller or Pytorch / Executorch), Embedded programming in C.
- - Tooling:
- - TensorFlow Lite
- - CMSIS-NN
- - Keil MDK
- - Hardware:
- - Arm Cortex-M based microcontroller development board and compatible camera module.
- - Access to hardware suitable for training neural networks
-
- ## Resources from Arm and our partners
-
- - Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/)
- - Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/)
- - Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books)
- - Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books)
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-
-## Description
-This project aims to develop a real-time image classification system using Convolutional Neural Networks (CNN) on an Arm Cortex-M microcontroller with CMSIS-NN. The main deliverables include training a CNN model on a custom dataset, quantizing the model to deploy it on resource-constrained devices, and transforming the model into a C format to compile and run on the microcontroller. The project will provide practical experience in running AI models on edge devices and optimizing AI models for efficient performance. The final output will be a functional image classification system capable of real-time processing on a microcontroller.
-
-
-## Prequisites
-
-- Languages: Python, ML framework experience (TensorFlowLite for microcontroller or Pytorch / Executorch), Embedded programming in C.
-- Tooling:
- - TensorFlow Lite
- - CMSIS-NN
- - Keil MDK
-- Hardware:
- - Arm Cortex-M based microcontroller development board and compatible camera module.
- - Access to hardware suitable for training neural networks
-
-## Resources from Arm and our partners
-
-- Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/)
-- Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/)
-- Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books)
-- Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Real-Time-Image-Classification
+description: This self-service project trains, quantizes, and CMSIS-NN-deploys a CNN to achieve real-time image classification on an Arm Cortex-M board—demonstrating low-power, edge-ready AI on microcontrollers.
+subjects:
+- ML
+- Performance and Architecture
+requires-team:
+- No
+platform:
+- IoT
+- Embedded and Microcontrollers
+- AI
+sw-hw:
+- Software
+- Hardware
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Hidden
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+
+ ## Description
+ This project aims to develop a real-time image classification system using Convolutional Neural Networks (CNN) on an Arm Cortex-M microcontroller with CMSIS-NN. The main deliverables include training a CNN model on a custom dataset, quantizing the model to deploy it on resource-constrained devices, and transforming the model into a C format to compile and run on the microcontroller. The project will provide practical experience in running AI models on edge devices and optimizing AI models for efficient performance. The final output will be a functional image classification system capable of real-time processing on a microcontroller.
+
+
+ ## Prerequisites
+
+ - Languages: Python, ML framework experience (TensorFlowLite for microcontroller or Pytorch / Executorch), Embedded programming in C.
+ - Tooling:
+ - TensorFlow Lite
+ - CMSIS-NN
+ - Keil MDK
+ - Hardware:
+ - Arm Cortex-M based microcontroller development board and compatible camera module.
+ - Access to hardware suitable for training neural networks
+
+ ## Resources from Arm and our partners
+
+ - Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/)
+ - Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/)
+ - Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books)
+ - Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books)
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+
+## Description
+This project aims to develop a real-time image classification system using Convolutional Neural Networks (CNN) on an Arm Cortex-M microcontroller with CMSIS-NN. The main deliverables include training a CNN model on a custom dataset, quantizing the model to deploy it on resource-constrained devices, and transforming the model into a C format to compile and run on the microcontroller. The project will provide practical experience in running AI models on edge devices and optimizing AI models for efficient performance. The final output will be a functional image classification system capable of real-time processing on a microcontroller.
+
+
+## Prerequisites
+
+- Languages: Python, ML framework experience (TensorFlowLite for microcontroller or Pytorch / Executorch), Embedded programming in C.
+- Tooling:
+ - TensorFlow Lite
+ - CMSIS-NN
+ - Keil MDK
+- Hardware:
+ - Arm Cortex-M based microcontroller development board and compatible camera module.
+ - Access to hardware suitable for training neural networks
+
+## Resources from Arm and our partners
+
+- Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/)
+- Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/)
+- Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books)
+- Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books)
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md b/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md
index 78d93c44..46430340 100644
--- a/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md
+++ b/docs/_posts/2025-05-30-Responsible-AI-and-Yellow-Teaming.md
@@ -1,144 +1,145 @@
----
-title: Responsible AI and Yellow Teaming
-description: This self-service project equips teams with a YellowTeamGPT workflow that probes Arm-based AI products for unintended impacts—turning responsible-AI stress-testing into a core step of the development cycle.
-subjects:
-- ML
-requires-team:
-- No
-platform:
-- Servers and Cloud Computing
-- Laptops and Desktops
-- AI
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
- ## Description
-
-
- **Why this is important?**
-
- AI products are becoming more capable by the day. But unless we think carefully about how we build AI systems, we risk amplifying harm as fast as we’re scaling performance. Even AI products that seem successful in narrow metrics like `number of users` and `engagement` can lead to a degradation of user trust in your company, reputational risk, and drive negative societal outcomes. A more systematic product development approach is necessary for this next generation of tech to ensure we get the benefits of AI without the downsides.
-
- **Project summary**
-
- This project introduces "Yellow Teaming", a structured methodology for stress-testing AI products by exploring the full spectrum of consequences when products succeed, not just fail. Students will create a YellowTeamGPT to analyze their Arm-based product concept and apply the learnings to make their product better. This exercise is an excellent way for software developers, product managers, and designers to elevate their products through thoughtful design choices above a crowded competitive landscape.
-
- The assistant will be integrated into your product development workflow (e.g. product brainstorming, feature planning reviews, coding sessions) to aid software teams in surfacing unintended effects of new product features on your company, your users, and society. Participants can implement their YellowTeamGPT using any LLM, from a private Llama3.1-8B model on an AWS instance (tutorial linked below) to a public ChatGPT/Claude/other chatbot. Participants can also Yellow Team without an LLM by applying the methodology themselves and documenting their analysis. Analysis can be documented as product design documents, sprint retrospectives, Git-based code reviews...anything that shows the results of their thoughtful design practices.
-
- Key Objectives of Your Project
- - Collect Real-World Applications: Gather detailed accounts of AI projects developed using Yellow Teaming principles on Arm-based systems.
- - Showcase Responsible AI Practices: Highlight how developers anticipate and address potential societal and ethical impacts of their AI solutions.
- - Promote Arm-Based AI Development: Demonstrate the capabilities and advantages of deploying AI applications on Arm architectures, such as AWS Graviton processors or smartphones.
- - Yellow Teaming Implementation: A detailed account of how Yellow Teaming was applied, including the tools/prompts used to facilitate analysis, identification of unintended consequences, strategies to mitigate negative product impacts, and/or new net-positive features ideated.
-
-
- ## Prequisites
-
- If deploying a private Llama model ->
- - **Hardware**:
- - Access to an Arm-based cloud instance, for example Arm-based Graviton4 processors.
- - **Software**:
- - PyTorch and Hugging Face account
- - `torchchat` repo and dependencies
- - Hugging Face CLI for LLM download
- - Git, Python 3.10+, and various common build essentials (e.g., `make`, `g++`)
- - **Skills**:
- - Proficiency in Python and PyTorch
- - [Hugging Face account](https://huggingface.co/)
- - Understanding of LLMs and prompting techniques
-
- If using a public LLM ->
- - **Hardware**:
- - None needed
- - **Software**:
- - Access to a public LLM
- - **Skills**:
- - Understanding of LLMs and prompting techniques
-
- ## Resources from Arm and our partners
-
- - External Course: [Mitigating Harmful Consequences course module by the Center for Humane Technology](https://www.humanetech.com/course)
- - Blog: [Build Responsible AI products with your own Yellow Teaming LLM](https://pytorch.org/blog/build-responsible-ai-products-with-your-own-yellow-teaming-llm/)
- - Learning Paths: [AI Learning Paths](https://learn.arm.com/tag/ml)
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-## Description
-
-
-**Why this is important?**
-
-AI products are becoming more capable by the day. But unless we think carefully about how we build AI systems, we risk amplifying harm as fast as we’re scaling performance. Even AI products that seem successful in narrow metrics like `number of users` and `engagement` can lead to a degradation of user trust in your company, reputational risk, and drive negative societal outcomes. A more systematic product development approach is necessary for this next generation of tech to ensure we get the benefits of AI without the downsides.
-
-**Project summary**
-
-This project introduces "Yellow Teaming", a structured methodology for stress-testing AI products by exploring the full spectrum of consequences when products succeed, not just fail. Students will create a YellowTeamGPT to analyze their Arm-based product concept and apply the learnings to make their product better. This exercise is an excellent way for software developers, product managers, and designers to elevate their products through thoughtful design choices above a crowded competitive landscape.
-
-The assistant will be integrated into your product development workflow (e.g. product brainstorming, feature planning reviews, coding sessions) to aid software teams in surfacing unintended effects of new product features on your company, your users, and society. Participants can implement their YellowTeamGPT using any LLM, from a private Llama3.1-8B model on an AWS instance (tutorial linked below) to a public ChatGPT/Claude/other chatbot. Participants can also Yellow Team without an LLM by applying the methodology themselves and documenting their analysis. Analysis can be documented as product design documents, sprint retrospectives, Git-based code reviews...anything that shows the results of their thoughtful design practices.
-
-Key Objectives of Your Project
-- Collect Real-World Applications: Gather detailed accounts of AI projects developed using Yellow Teaming principles on Arm-based systems.
-- Showcase Responsible AI Practices: Highlight how developers anticipate and address potential societal and ethical impacts of their AI solutions.
-- Promote Arm-Based AI Development: Demonstrate the capabilities and advantages of deploying AI applications on Arm architectures, such as AWS Graviton processors or smartphones.
-- Yellow Teaming Implementation: A detailed account of how Yellow Teaming was applied, including the tools/prompts used to facilitate analysis, identification of unintended consequences, strategies to mitigate negative product impacts, and/or new net-positive features ideated.
-
-
-## Prequisites
-
-If deploying a private Llama model ->
-- **Hardware**:
- - Access to an Arm-based cloud instance, for example Arm-based Graviton4 processors.
-- **Software**:
- - PyTorch and Hugging Face account
- - `torchchat` repo and dependencies
- - Hugging Face CLI for LLM download
- - Git, Python 3.10+, and various common build essentials (e.g., `make`, `g++`)
-- **Skills**:
- - Proficiency in Python and PyTorch
- - [Hugging Face account](https://huggingface.co/)
- - Understanding of LLMs and prompting techniques
-
-If using a public LLM ->
-- **Hardware**:
- - None needed
-- **Software**:
- - Access to a public LLM
-- **Skills**:
- - Understanding of LLMs and prompting techniques
-
-## Resources from Arm and our partners
-
-- External Course: [Mitigating Harmful Consequences course module by the Center for Humane Technology](https://www.humanetech.com/course)
-- Blog: [Build Responsible AI products with your own Yellow Teaming LLM](https://pytorch.org/blog/build-responsible-ai-products-with-your-own-yellow-teaming-llm/)
-- Learning Paths: [AI Learning Paths](https://learn.arm.com/tag/ml)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Responsible-AI-and-Yellow-Teaming
+description: This self-service project equips teams with a YellowTeamGPT workflow that probes Arm-based AI products for unintended impacts—turning responsible-AI stress-testing into a core step of the development cycle.
+subjects:
+- ML
+requires-team:
+- No
+platform:
+- Servers and Cloud Computing
+- Laptops and Desktops
+- AI
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+ ## Description
+
+
+ **Why is this important?**
+
+ AI products are becoming more capable by the day. But unless we think carefully about how we build AI systems, we risk amplifying harm as fast as we’re scaling performance. Even AI products that seem successful in narrow metrics like `number of users` and `engagement` can lead to a degradation of user trust in your company, reputational risk, and drive negative societal outcomes. A more systematic product development approach is necessary for this next generation of tech to ensure we get the benefits of AI without the downsides.
+
+ **Project summary**
+
+ This project introduces "Yellow Teaming", a structured methodology for stress-testing AI products by exploring the full spectrum of consequences when products succeed, not just fail. Students will create a YellowTeamGPT to analyze their Arm-based product concept and apply the learnings to make their product better. This exercise is an excellent way for software developers, product managers, and designers to elevate their products through thoughtful design choices above a crowded competitive landscape.
+
+ The assistant will be integrated into your product development workflow (e.g. product brainstorming, feature planning reviews, coding sessions) to aid software teams in surfacing unintended effects of new product features on your company, your users, and society. Participants can implement their YellowTeamGPT using any LLM, from a private Llama3.1-8B model on an AWS instance (tutorial linked below) to a public ChatGPT/Claude/other chatbot. Participants can also Yellow Team without an LLM by applying the methodology themselves and documenting their analysis. Analysis can be documented as product design documents, sprint retrospectives, Git-based code reviews...anything that shows the results of their thoughtful design practices.
+
+ Key Objectives of Your Project
+ - Collect Real-World Applications: Gather detailed accounts of AI projects developed using Yellow Teaming principles on Arm-based systems.
+ - Showcase Responsible AI Practices: Highlight how developers anticipate and address potential societal and ethical impacts of their AI solutions.
+ - Promote Arm-Based AI Development: Demonstrate the capabilities and advantages of deploying AI applications on Arm architectures, such as AWS Graviton processors or smartphones.
+ - Yellow Teaming Implementation: A detailed account of how Yellow Teaming was applied, including the tools/prompts used to facilitate analysis, identification of unintended consequences, strategies to mitigate negative product impacts, and/or new net-positive features ideated.
+
+
+ ## Prerequisites
+
+ If deploying a private Llama model ->
+ - **Hardware**:
+ - Access to an Arm-based cloud instance, for example Arm-based Graviton4 processors.
+ - **Software**:
+ - PyTorch and Hugging Face account
+ - `torchchat` repo and dependencies
+ - Hugging Face CLI for LLM download
+ - Git, Python 3.10+, and various common build essentials (e.g., `make`, `g++`)
+ - **Skills**:
+ - Proficiency in Python and PyTorch
+ - [Hugging Face account](https://huggingface.co/)
+ - Understanding of LLMs and prompting techniques
+
+ If using a public LLM ->
+ - **Hardware**:
+ - None needed
+ - **Software**:
+ - Access to a public LLM
+ - **Skills**:
+ - Understanding of LLMs and prompting techniques
+
+ ## Resources from Arm and our partners
+
+ - External Course: [Mitigating Harmful Consequences course module by the Center for Humane Technology](https://www.humanetech.com/course)
+ - Blog: [Build Responsible AI products with your own Yellow Teaming LLM](https://pytorch.org/blog/build-responsible-ai-products-with-your-own-yellow-teaming-llm/)
+ - Learning Paths: [AI Learning Paths](https://learn.arm.com/tag/ml)
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+## Description
+
+
+**Why is this important?**
+
+AI products are becoming more capable by the day. But unless we think carefully about how we build AI systems, we risk amplifying harm as fast as we’re scaling performance. Even AI products that seem successful in narrow metrics like `number of users` and `engagement` can lead to a degradation of user trust in your company, reputational risk, and drive negative societal outcomes. A more systematic product development approach is necessary for this next generation of tech to ensure we get the benefits of AI without the downsides.
+
+**Project summary**
+
+This project introduces "Yellow Teaming", a structured methodology for stress-testing AI products by exploring the full spectrum of consequences when products succeed, not just fail. Students will create a YellowTeamGPT to analyze their Arm-based product concept and apply the learnings to make their product better. This exercise is an excellent way for software developers, product managers, and designers to elevate their products through thoughtful design choices above a crowded competitive landscape.
+
+The assistant will be integrated into your product development workflow (e.g. product brainstorming, feature planning reviews, coding sessions) to aid software teams in surfacing unintended effects of new product features on your company, your users, and society. Participants can implement their YellowTeamGPT using any LLM, from a private Llama3.1-8B model on an AWS instance (tutorial linked below) to a public ChatGPT/Claude/other chatbot. Participants can also Yellow Team without an LLM by applying the methodology themselves and documenting their analysis. Analysis can be documented as product design documents, sprint retrospectives, Git-based code reviews...anything that shows the results of their thoughtful design practices.
+
+Key Objectives of Your Project
+- Collect Real-World Applications: Gather detailed accounts of AI projects developed using Yellow Teaming principles on Arm-based systems.
+- Showcase Responsible AI Practices: Highlight how developers anticipate and address potential societal and ethical impacts of their AI solutions.
+- Promote Arm-Based AI Development: Demonstrate the capabilities and advantages of deploying AI applications on Arm architectures, such as AWS Graviton processors or smartphones.
+- Yellow Teaming Implementation: A detailed account of how Yellow Teaming was applied, including the tools/prompts used to facilitate analysis, identification of unintended consequences, strategies to mitigate negative product impacts, and/or new net-positive features ideated.
+
+
+## Prerequisites
+
+If deploying a private Llama model ->
+- **Hardware**:
+ - Access to an Arm-based cloud instance, for example Arm-based Graviton4 processors.
+- **Software**:
+ - PyTorch and Hugging Face account
+ - `torchchat` repo and dependencies
+ - Hugging Face CLI for LLM download
+ - Git, Python 3.10+, and various common build essentials (e.g., `make`, `g++`)
+- **Skills**:
+ - Proficiency in Python and PyTorch
+ - [Hugging Face account](https://huggingface.co/)
+ - Understanding of LLMs and prompting techniques
+
+If using a public LLM ->
+- **Hardware**:
+ - None needed
+- **Software**:
+ - Access to a public LLM
+- **Skills**:
+ - Understanding of LLMs and prompting techniques
+
+## Resources from Arm and our partners
+
+- External Course: [Mitigating Harmful Consequences course module by the Center for Humane Technology](https://www.humanetech.com/course)
+- Blog: [Build Responsible AI products with your own Yellow Teaming LLM](https://pytorch.org/blog/build-responsible-ai-products-with-your-own-yellow-teaming-llm/)
+- Learning Paths: [AI Learning Paths](https://learn.arm.com/tag/ml)
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md b/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md
index de5fcfe1..26f0be37 100644
--- a/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md
+++ b/docs/_posts/2025-05-30-Sentiment-Analysis-Dashboard.md
@@ -1,85 +1,86 @@
----
-title: Create a Sentiment Analysis Dashboard for Keywords Based on the Semiconductor Industry
-description: This self-service project builds a web-scraping, LLM-powered dashboard that tracks and visualizes sentiment trends across semiconductor-industry news, giving stakeholders a real-time pulse on market mood and emerging themes.
-subjects:
-- ML
-- Web
-- Databases
-requires-team:
-- No
-platform:
-- Servers and Cloud Computing
-- Laptops and Desktops
-- Mobile, Graphics, and Gaming
-- AI
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Hidden
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
- ## Description
- This project aims to develop a sentiment analysis dashboard for keywords related to the semiconductor industry. The main deliverable is a web scraping script that gathers text data from various semiconductor news sites. Example sites are [SemiconductorEngineering.com](https://semiengineering.com/), [IEEE Spectrum](https://spectrum.ieee.org/), [EETimes](https://www.eetimes.com/tag/semiconductors/), [SemiconductorDigest](https://www.semiconductor-digest.com/), [SemiconductorToday](https://semiconductor-today.com/), [Financial Times - Semiconductors](https://www.ft.com/semiconductors), [Bezinga Semiconductors](https://www.benzinga.com/topic/semiconductors).
-
- This data will then be processed through a sentiment analysis LLM (Large Language Model) to determine the sentiment of the content and how it varies over time. The project will provide practical experience in web scraping, data processing, databases and using LLMs for sentiment analysis. The final output will be a functional dashboard that displays the sentiment analysis results in an easy-to-understand format.
-
- ## Prequisites
-
- - Languages: Intermediate understanding of Python
- - Hardware: Access to a computer with internet connectivity and access to cloud instances
-
- ## Resources from Arm and our partners
-
- You are free to choose your own implementation details. The resouces below are examples to get started.
-
- - External Documentation: [BeautifulSoup](https://pypi.org/project/beautifulsoup4/)
- - Learning Paths: [Deploy MariaDB](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/)
-
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-## Description
-This project aims to develop a sentiment analysis dashboard for keywords related to the semiconductor industry. The main deliverable is a web scraping script that gathers text data from various semiconductor news sites. Example sites are [SemiconductorEngineering.com](https://semiengineering.com/), [IEEE Spectrum](https://spectrum.ieee.org/), [EETimes](https://www.eetimes.com/tag/semiconductors/), [SemiconductorDigest](https://www.semiconductor-digest.com/), [SemiconductorToday](https://semiconductor-today.com/), [Financial Times - Semiconductors](https://www.ft.com/semiconductors), [Bezinga Semiconductors](https://www.benzinga.com/topic/semiconductors).
-
-This data will then be processed through a sentiment analysis LLM (Large Language Model) to determine the sentiment of the content and how it varies over time. The project will provide practical experience in web scraping, data processing, databases and using LLMs for sentiment analysis. The final output will be a functional dashboard that displays the sentiment analysis results in an easy-to-understand format.
-
-## Prequisites
-
-- Languages: Intermediate understanding of Python
-- Hardware: Access to a computer with internet connectivity and access to cloud instances
-
-## Resources from Arm and our partners
-
-You are free to choose your own implementation details. The resouces below are examples to get started.
-
-- External Documentation: [BeautifulSoup](https://pypi.org/project/beautifulsoup4/)
-- Learning Paths: [Deploy MariaDB](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/)
-
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Sentiment-Analysis-Dashboard
+description: This self-service project builds a web-scraping, LLM-powered dashboard that tracks and visualizes sentiment trends across semiconductor-industry news, giving stakeholders a real-time pulse on market mood and emerging themes.
+subjects:
+- ML
+- Web
+- Databases
+requires-team:
+- No
+platform:
+- Servers and Cloud Computing
+- Laptops and Desktops
+- Mobile, Graphics, and Gaming
+- AI
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Hidden
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+ ## Description
+ This project aims to develop a sentiment analysis dashboard for keywords related to the semiconductor industry. The main deliverable is a web scraping script that gathers text data from various semiconductor news sites. Example sites are [SemiconductorEngineering.com](https://semiengineering.com/), [IEEE Spectrum](https://spectrum.ieee.org/), [EETimes](https://www.eetimes.com/tag/semiconductors/), [SemiconductorDigest](https://www.semiconductor-digest.com/), [SemiconductorToday](https://semiconductor-today.com/), [Financial Times - Semiconductors](https://www.ft.com/semiconductors), [Benzinga Semiconductors](https://www.benzinga.com/topic/semiconductors).
+
+ This data will then be processed through a sentiment analysis LLM (Large Language Model) to determine the sentiment of the content and how it varies over time. The project will provide practical experience in web scraping, data processing, databases and using LLMs for sentiment analysis. The final output will be a functional dashboard that displays the sentiment analysis results in an easy-to-understand format.
+
+ ## Prerequisites
+
+ - Languages: Intermediate understanding of Python
+ - Hardware: Access to a computer with internet connectivity and access to cloud instances
+
+ ## Resources from Arm and our partners
+
+ You are free to choose your own implementation details. The resources below are examples to get started.
+
+ - External Documentation: [BeautifulSoup](https://pypi.org/project/beautifulsoup4/)
+ - Learning Paths: [Deploy MariaDB](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/)
+
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+## Description
+This project aims to develop a sentiment analysis dashboard for keywords related to the semiconductor industry. The main deliverable is a web scraping script that gathers text data from various semiconductor news sites. Example sites are [SemiconductorEngineering.com](https://semiengineering.com/), [IEEE Spectrum](https://spectrum.ieee.org/), [EETimes](https://www.eetimes.com/tag/semiconductors/), [SemiconductorDigest](https://www.semiconductor-digest.com/), [SemiconductorToday](https://semiconductor-today.com/), [Financial Times - Semiconductors](https://www.ft.com/semiconductors), [Benzinga Semiconductors](https://www.benzinga.com/topic/semiconductors).
+
+This data will then be processed through a sentiment analysis LLM (Large Language Model) to determine the sentiment of the content and how it varies over time. The project will provide practical experience in web scraping, data processing, databases and using LLMs for sentiment analysis. The final output will be a functional dashboard that displays the sentiment analysis results in an easy-to-understand format.
+
+## Prerequisites
+
+- Languages: Intermediate understanding of Python
+- Hardware: Access to a computer with internet connectivity and access to cloud instances
+
+## Resources from Arm and our partners
+
+You are free to choose your own implementation details. The resources below are examples to get started.
+
+- External Documentation: [BeautifulSoup](https://pypi.org/project/beautifulsoup4/)
+- Learning Paths: [Deploy MariaDB](https://learn.arm.com/learning-paths/servers-and-cloud-computing/mariadb/)
+
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Smart-Voice-Assistant.md b/docs/_posts/2025-05-30-Smart-Voice-Assistant.md
index ee40068c..3310c49c 100644
--- a/docs/_posts/2025-05-30-Smart-Voice-Assistant.md
+++ b/docs/_posts/2025-05-30-Smart-Voice-Assistant.md
@@ -1,91 +1,94 @@
----
-title: Smart Voice Assistant Using TinyML on Cortex-M55
-description: This project trains and deploys a TinyML keyword-spotting model on an Arm Cortex-M55/U55 board to create a low-power voice assistant that recognizes spoken commands and quantifies its accuracy, latency, and energy use.
-subjects:
-- ML
-requires-team:
-- No
-platform:
-- IoT
-- Embedded and Microcontrollers
-- AI
-sw-hw:
-- Software
-- Hardware
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Hidden
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
-
-
-
- ## Description
- This project aims to develop a simple voice assistant that can recognize spoken commands such as “turn on the light,” “play music,” and other similar tasks. The voice assistant will be able to control peripheral devices accordingly. The main objective is to implement your design on a low-power microcontroller Cortex-M55/U55 to create low-level machine learning applications. You should look to access metrics such as the accuracy, power and computation time. Please refer our [Machine Learning keyword spotting example](https://github.com/Arm-Examples/mlek-cmsis-pack-examples) as a reference.
-
- The deliverables include a functional voice assistant capable of understanding and executing basic commands, along with documentation detailing the development process and the performance of the system.
-
- ## Prequisites
-
- - Languages: Python, C++, Embedded C
- - Tooling: TensorFlow Lite for Microcontrollers, Keil MDK
- - Hardware: Cortex-M55/U55 development board (or Corstone Virtual Platform), microphone, peripheral devices (e.g., lights, speakers)
-
-
- ## Resources from Arm and our partners
-
- - Learning paths: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/)
- - Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/)
- - Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/)
- - Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books)
- - Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books)
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-
-## Description
-This project aims to develop a simple voice assistant that can recognize spoken commands such as “turn on the light,” “play music,” and other similar tasks. The voice assistant will be able to control peripheral devices accordingly. The main objective is to implement your design on a low-power microcontroller Cortex-M55/U55 to create low-level machine learning applications. You should look to access metrics such as the accuracy, power and computation time. Please refer our [Machine Learning keyword spotting example](https://github.com/Arm-Examples/mlek-cmsis-pack-examples) as a reference.
-
-The deliverables include a functional voice assistant capable of understanding and executing basic commands, along with documentation detailing the development process and the performance of the system.
-
-## Prequisites
-
-- Languages: Python, C++, Embedded C
-- Tooling: TensorFlow Lite for Microcontrollers, Keil MDK
-- Hardware: Cortex-M55/U55 development board (or Corstone Virtual Platform), microphone, peripheral devices (e.g., lights, speakers)
-
-
-## Resources from Arm and our partners
-
-- Learning paths: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/)
-- Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/)
-- Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/)
-- Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books)
-- Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Smart-Voice-Assistant
+description: This project trains and deploys a TinyML keyword-spotting model on an Arm Cortex-M55/U55 board to create a low-power voice assistant that recognizes spoken commands and quantifies its accuracy, latency, and energy use.
+subjects:
+- ML
+requires-team:
+- No
+platform:
+- IoT
+- Embedded and Microcontrollers
+- AI
+sw-hw:
+- Software
+- Hardware
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Hidden
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+
+ ## Description
+ This project aims to develop a simple voice assistant that can recognize spoken commands such as “turn on the light,” “play music,” and other similar tasks. The voice assistant will be able to control peripheral devices accordingly. The main objective is to implement your design on a low-power microcontroller Cortex-M55/U55 to create low-level machine learning applications. You should look to assess metrics such as the accuracy, power and computation time. Please refer to our [Machine Learning keyword spotting example](https://github.com/Arm-Examples/mlek-cmsis-pack-examples) as a reference.
+
+ The deliverables include a functional voice assistant capable of understanding and executing basic commands, along with documentation detailing the development process and the performance of the system.
+
+ ## Prerequisites
+
+ - Languages: Python, C++, Embedded C
+ - Tooling: TensorFlow Lite for Microcontrollers, Keil MDK
+ - Hardware: Cortex-M55/U55 development board (or Corstone Virtual Platform), microphone, peripheral devices (e.g., lights, speakers)
+
+
+ ## Resources from Arm and our partners
+
+ - Learning paths: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/)
+ - Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/)
+ - Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/)
+ - Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books)
+ - Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books)
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+
+## Description
+This project aims to develop a simple voice assistant that can recognize spoken commands such as “turn on the light,” “play music,” and other similar tasks. The voice assistant will be able to control peripheral devices accordingly. The main objective is to implement your design on a low-power microcontroller Cortex-M55/U55 to create low-level machine learning applications. You should look to assess metrics such as the accuracy, power and computation time. Please refer to our [Machine Learning keyword spotting example](https://github.com/Arm-Examples/mlek-cmsis-pack-examples) as a reference.
+
+The deliverables include a functional voice assistant capable of understanding and executing basic commands, along with documentation detailing the development process and the performance of the system.
+
+## Prerequisites
+
+- Languages: Python, C++, Embedded C
+- Tooling: TensorFlow Lite for Microcontrollers, Keil MDK
+- Hardware: Cortex-M55/U55 development board (or Corstone Virtual Platform), microphone, peripheral devices (e.g., lights, speakers)
+
+
+## Resources from Arm and our partners
+
+- Learning paths: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/)
+- Learning paths: [Tutorials on CMSIS](https://learn.arm.com/tag/cmsis/)
+- Install Guide: [Keil Studio for VSCode](https://learn.arm.com/install-guides/keilstudio_vs/)
+- Book: ["A beginner's Guide to Designing Embedded System Applications on Arm Cortex-M Microcontrollers"](https://www.arm.com/resources/education/books)
+- Book: ["Arm Helium Technology M-Profile Vector Extensions (MVE)"](https://www.arm.com/resources/education/books)
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md b/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md
index 09d7f974..29dc94cf 100644
--- a/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md
+++ b/docs/_posts/2025-05-30-SpecINT2017-benchmarking-on-Arm64.md
@@ -1,124 +1,127 @@
----
-title: 'SpecINT2017 Benchmarking on Arm64: Evaluating Compiler and Workload Performance'
-description: This self-service project profiles SPEC CPU2017 on Arm64 servers—using GCC, Clang, and Arm Compiler with top-down analysis—to reveal how compiler choices and Arm micro-architectural features impact execution time, energy efficiency, and performance bottlenecks.
-subjects:
-- Performance and Architecture
-- Migration to Arm
-requires-team:
-- No
-platform:
-- Servers and Cloud Computing
-- Laptops and Desktops
-- AI
-sw-hw:
-- Software
-- Hardware
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
- ### Description
-
- **Why this is important?**
-
- SPEC is an industry standard for assessing the performance of both single-threaded and multi-threaded applications across various data types and compilers. This synthetic workload accurately represents real-world tasks and serves as a common metric for evaluating platform choices. Therefore, it is essential to comprehend the inner workings of these benchmarks to identify which microarchitectural features can enhance end-user applications.
-
- **Project Summary**
-
- This project aims to replicate the characterisation study from "SPEC CPU2017: Performance, Event, and Energy Characterization on the Core i7-8700K" on an Arm64 platform (e.g., Ampere Altra, AWS Graviton) using different compilers and performance profiling tools. The study will analyze how compiler optimizations and architectural features affect execution time, energy efficiency, and instruction throughput on Arm-based server processors. Deliverables include a comprehensive performance analysis report, reproducible benchmarking scripts, and a dataset comparing performance across different configurations. The report should locate microarchitectural bottlenecks using the [top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology), compiler performance and recommendations on how to improve performance.
-
- ## Prequisites
-
- Hardware: Access to Arm64-based server (Ampere Altra, AWS Graviton, Raspberry Pi for preliminary tests)
-
- Software: Familiarity with performance engineering and a OOP with a language such as C++.
-
- Compilers: GCC, LLVM/Clang, Arm Compiler for Linux
-
- Profiling Tools: perf, Arm Performance Libraries
-
- Workloads: SPEC CPU2017 (academic license required), custom workloads
-
- ## Resources from Arm and our partners
-
- - Research Article: [Characterisation Paper on x86](https://research.spec.org/icpe_proceedings/2019/proceedings/p111.pdf)
-
- - Whitepaper: [Arm Top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology)
-
- - Install Guide:[Install Perf for Linux on Arm](https://learn.arm.com/install-guides/perf/)
-
- - Documentation: [Arm Performance Counters](https://developer.arm.com/documentation/ddi0379/a/Introduction/Performance-counters)
-
- - Documentation: [SPEC CPU2017 ](https://www.spec.org/cpu2017/results/)
-
- - Documentation: [GNU compilers](https://gcc.gnu.org/)
-
- - Software Download: [Arm compiler for Linux](https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux)
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-### Description
-
-**Why this is important?**
-
-SPEC is an industry standard for assessing the performance of both single-threaded and multi-threaded applications across various data types and compilers. This synthetic workload accurately represents real-world tasks and serves as a common metric for evaluating platform choices. Therefore, it is essential to comprehend the inner workings of these benchmarks to identify which microarchitectural features can enhance end-user applications.
-
-**Project Summary**
-
-This project aims to replicate the characterisation study from "SPEC CPU2017: Performance, Event, and Energy Characterization on the Core i7-8700K" on an Arm64 platform (e.g., Ampere Altra, AWS Graviton) using different compilers and performance profiling tools. The study will analyze how compiler optimizations and architectural features affect execution time, energy efficiency, and instruction throughput on Arm-based server processors. Deliverables include a comprehensive performance analysis report, reproducible benchmarking scripts, and a dataset comparing performance across different configurations. The report should locate microarchitectural bottlenecks using the [top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology), compiler performance and recommendations on how to improve performance.
-
-## Prequisites
-
-Hardware: Access to Arm64-based server (Ampere Altra, AWS Graviton, Raspberry Pi for preliminary tests)
-
-Software: Familiarity with performance engineering and a OOP with a language such as C++.
-
-Compilers: GCC, LLVM/Clang, Arm Compiler for Linux
-
-Profiling Tools: perf, Arm Performance Libraries
-
-Workloads: SPEC CPU2017 (academic license required), custom workloads
-
-## Resources from Arm and our partners
-
-- Research Article: [Characterisation Paper on x86](https://research.spec.org/icpe_proceedings/2019/proceedings/p111.pdf)
-
-- Whitepaper: [Arm Top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology)
-
-- Install Guide:[Install Perf for Linux on Arm](https://learn.arm.com/install-guides/perf/)
-
-- Documentation: [Arm Performance Counters](https://developer.arm.com/documentation/ddi0379/a/Introduction/Performance-counters)
-
-- Documentation: [SPEC CPU2017 ](https://www.spec.org/cpu2017/results/)
-
-- Documentation: [GNU compilers](https://gcc.gnu.org/)
-
-- Software Download: [Arm compiler for Linux](https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: SpecINT2017-benchmarking-on-Arm64
+description: This self-service project profiles SPEC CPU2017 on Arm64 servers—using GCC, Clang, and Arm Compiler with top-down analysis—to reveal how compiler choices and Arm micro-architectural features impact execution time, energy efficiency, and performance bottlenecks.
+subjects:
+- Performance and Architecture
+- Migration to Arm
+requires-team:
+- No
+platform:
+- Servers and Cloud Computing
+- Laptops and Desktops
+- AI
+sw-hw:
+- Software
+- Hardware
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+ ### Description
+
+ **Why is this important?**
+
+ SPEC is an industry standard for assessing the performance of both single-threaded and multi-threaded applications across various data types and compilers. This synthetic workload accurately represents real-world tasks and serves as a common metric for evaluating platform choices. Therefore, it is essential to comprehend the inner workings of these benchmarks to identify which microarchitectural features can enhance end-user applications.
+
+ **Project Summary**
+
+ This project aims to replicate the characterisation study from "SPEC CPU2017: Performance, Event, and Energy Characterization on the Core i7-8700K" on an Arm64 platform (e.g., Ampere Altra, AWS Graviton) using different compilers and performance profiling tools. The study will analyze how compiler optimizations and architectural features affect execution time, energy efficiency, and instruction throughput on Arm-based server processors. Deliverables include a comprehensive performance analysis report, reproducible benchmarking scripts, and a dataset comparing performance across different configurations. The report should locate microarchitectural bottlenecks using the [top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology), compiler performance and recommendations on how to improve performance.
+
+ ## Prerequisites
+
+ Hardware: Access to Arm64-based server (Ampere Altra, AWS Graviton, Raspberry Pi for preliminary tests)
+
+ Software: Familiarity with performance engineering and OOP in a language such as C++.
+
+ Compilers: GCC, LLVM/Clang, Arm Compiler for Linux
+
+ Profiling Tools: perf, Arm Performance Libraries
+
+ Workloads: SPEC CPU2017 (academic license required), custom workloads
+
+ ## Resources from Arm and our partners
+
+ - Research Article: [Characterisation Paper on x86](https://research.spec.org/icpe_proceedings/2019/proceedings/p111.pdf)
+
+ - Whitepaper: [Arm Top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology)
+
+ - Install Guide: [Install Perf for Linux on Arm](https://learn.arm.com/install-guides/perf/)
+
+ - Documentation: [Arm Performance Counters](https://developer.arm.com/documentation/ddi0379/a/Introduction/Performance-counters)
+
+ - Documentation: [SPEC CPU2017](https://www.spec.org/cpu2017/results/)
+
+ - Documentation: [GNU compilers](https://gcc.gnu.org/)
+
+ - Software Download: [Arm compiler for Linux](https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux)
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with the opportunity for some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+### Description
+
+**Why is this important?**
+
+SPEC is an industry standard for assessing the performance of both single-threaded and multi-threaded applications across various data types and compilers. This synthetic workload accurately represents real-world tasks and serves as a common metric for evaluating platform choices. Therefore, it is essential to comprehend the inner workings of these benchmarks to identify which microarchitectural features can enhance end-user applications.
+
+**Project Summary**
+
+This project aims to replicate the characterisation study from "SPEC CPU2017: Performance, Event, and Energy Characterization on the Core i7-8700K" on an Arm64 platform (e.g., Ampere Altra, AWS Graviton) using different compilers and performance profiling tools. The study will analyze how compiler optimizations and architectural features affect execution time, energy efficiency, and instruction throughput on Arm-based server processors. Deliverables include a comprehensive performance analysis report, reproducible benchmarking scripts, and a dataset comparing performance across different configurations. The report should locate microarchitectural bottlenecks using the [top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology), compiler performance and recommendations on how to improve performance.
+
+## Prerequisites
+
+Hardware: Access to Arm64-based server (Ampere Altra, AWS Graviton, Raspberry Pi for preliminary tests)
+
+Software: Familiarity with performance engineering and OOP in a language such as C++.
+
+Compilers: GCC, LLVM/Clang, Arm Compiler for Linux
+
+Profiling Tools: perf, Arm Performance Libraries
+
+Workloads: SPEC CPU2017 (academic license required), custom workloads
+
+## Resources from Arm and our partners
+
+- Research Article: [Characterisation Paper on x86](https://research.spec.org/icpe_proceedings/2019/proceedings/p111.pdf)
+
+- Whitepaper: [Arm Top-down methodology](https://developer.arm.com/documentation/109542/0100/Arm-Topdown-methodology)
+
+- Install Guide: [Install Perf for Linux on Arm](https://learn.arm.com/install-guides/perf/)
+
+- Documentation: [Arm Performance Counters](https://developer.arm.com/documentation/ddi0379/a/Introduction/Performance-counters)
+
+- Documentation: [SPEC CPU2017](https://www.spec.org/cpu2017/results/)
+
+- Documentation: [GNU compilers](https://gcc.gnu.org/)
+
+- Software Download: [Arm compiler for Linux](https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux)
+
+## Support Level
+
+This project is designed to be self-serve but comes with the opportunity for some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-Write-A-Learning-Path.md b/docs/_posts/2025-05-30-Write-A-Learning-Path.md
index 27bf0785..3dbef32d 100644
--- a/docs/_posts/2025-05-30-Write-A-Learning-Path.md
+++ b/docs/_posts/2025-05-30-Write-A-Learning-Path.md
@@ -1,80 +1,81 @@
----
-title: Write an Educational Tutorial (Learning Path) of your Choice
-description: This project lets students turn their Arm expertise into a publish-ready Learning Path—creating a structured, hands-on tutorial that guides others through a complete, hardware-friendly build and showcases the author’s teaching skills.
-subjects:
-- Libraries
-- Web
-requires-team:
-- No
-platform:
-- Servers and Cloud Computing
-- Laptops and Desktops
-- Mobile, Graphics, and Gaming
-- Automotive
-- IoT
-- Embedded and Microcontrollers
-- AI
-sw-hw:
-- Software
-- Hardware
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-05-30
-license:
-status:
-- Hidden
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
-
-
-
- ## Description
- This exciting opportunity invites students to create and submit a comprehensive learning path focused on any aspect of Arm technology. Participants will have the chance to showcase their expertise, contribute to the academic community, and potentially publish their work for a wider audience. Learning paths are structured tutorials that guide learners through a series of topics and skills. For example, a project could involve developing a learning path on Arm-based embedded systems, culminating in a practical demonstration of a working prototype, such as a simple temperature monitoring system using a basic microcontroller and a few sensors. This project can be easily recreated with minimal hardware and software access. We look forward to seeing your innovative and insightful submissions!
-
- ## Prequisites
-
- - Computer with Internet Connectivity
-
- ## Resources from Arm and our partners
-
- - Documentation: [How to create a learning path](https://learn.arm.com/learning-paths/cross-platform/_example-learning-path/)
- - Documentation: (https://github.com/ArmDeveloperEcosystem/arm-learning-paths/discussions/categories/ideas-for-new-learning-paths)
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-
-## Description
-This exciting opportunity invites students to create and submit a comprehensive learning path focused on any aspect of Arm technology. Participants will have the chance to showcase their expertise, contribute to the academic community, and potentially publish their work for a wider audience. Learning paths are structured tutorials that guide learners through a series of topics and skills. For example, a project could involve developing a learning path on Arm-based embedded systems, culminating in a practical demonstration of a working prototype, such as a simple temperature monitoring system using a basic microcontroller and a few sensors. This project can be easily recreated with minimal hardware and software access. We look forward to seeing your innovative and insightful submissions!
-
-## Prequisites
-
-- Computer with Internet Connectivity
-
-## Resources from Arm and our partners
-
-- Documentation: [How to create a learning path](https://learn.arm.com/learning-paths/cross-platform/_example-learning-path/)
-- Documentation: (https://github.com/ArmDeveloperEcosystem/arm-learning-paths/discussions/categories/ideas-for-new-learning-paths)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Write-A-Learning-Path
+description: This project lets students turn their Arm expertise into a publish-ready Learning Path—creating a structured, hands-on tutorial that guides others through a complete, hardware-friendly build and showcases the author’s teaching skills.
+subjects:
+- Libraries
+- Web
+requires-team:
+- No
+platform:
+- Servers and Cloud Computing
+- Laptops and Desktops
+- Mobile, Graphics, and Gaming
+- Automotive
+- IoT
+- Embedded and Microcontrollers
+- AI
+sw-hw:
+- Software
+- Hardware
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-05-30
+license:
+status:
+- Hidden
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+
+ ## Description
+ This exciting opportunity invites students to create and submit a comprehensive learning path focused on any aspect of Arm technology. Participants will have the chance to showcase their expertise, contribute to the academic community, and potentially publish their work for a wider audience. Learning paths are structured tutorials that guide learners through a series of topics and skills. For example, a project could involve developing a learning path on Arm-based embedded systems, culminating in a practical demonstration of a working prototype, such as a simple temperature monitoring system using a basic microcontroller and a few sensors. This project can be easily recreated with minimal hardware and software access. We look forward to seeing your innovative and insightful submissions!
+
+ ## Prerequisites
+
+ - Computer with Internet Connectivity
+
+ ## Resources from Arm and our partners
+
+ - Documentation: [How to create a learning path](https://learn.arm.com/learning-paths/cross-platform/_example-learning-path/)
+ - Documentation: [Ideas for new Learning Paths](https://github.com/ArmDeveloperEcosystem/arm-learning-paths/discussions/categories/ideas-for-new-learning-paths)
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with the opportunity for some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+
+## Description
+This exciting opportunity invites students to create and submit a comprehensive learning path focused on any aspect of Arm technology. Participants will have the chance to showcase their expertise, contribute to the academic community, and potentially publish their work for a wider audience. Learning paths are structured tutorials that guide learners through a series of topics and skills. For example, a project could involve developing a learning path on Arm-based embedded systems, culminating in a practical demonstration of a working prototype, such as a simple temperature monitoring system using a basic microcontroller and a few sensors. This project can be easily recreated with minimal hardware and software access. We look forward to seeing your innovative and insightful submissions!
+
+## Prerequisites
+
+- Computer with Internet Connectivity
+
+## Resources from Arm and our partners
+
+- Documentation: [How to create a learning path](https://learn.arm.com/learning-paths/cross-platform/_example-learning-path/)
+- Documentation: [Ideas for new Learning Paths](https://github.com/ArmDeveloperEcosystem/arm-learning-paths/discussions/categories/ideas-for-new-learning-paths)
+
+## Support Level
+
+This project is designed to be self-serve but comes with the opportunity for some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-05-30-projects.md b/docs/_posts/2025-05-30-projects.md
index f2bb51f4..405c8ffc 100644
--- a/docs/_posts/2025-05-30-projects.md
+++ b/docs/_posts/2025-05-30-projects.md
@@ -1,13 +1,14 @@
----
-title: 'Find industry relevant challenges on Arm '
-filter: project
-publication-date: 2025-05-30
-layout: article
-full_description: |-
- **Arm Developer Labs** is a repository of software and hardware project suggestions sourced from internal Arm teams and our network of eco-system partners but framed in way to be immediately accessible and useful to both academics and professional software developers alike.
-
- Filter or keyword search below. You can take a project as is, adapt it to your circumstances or simply take inspiration from it. Each project link shows what contextual collateral and resources are available. To get benefits from Arm, you need to tell us about your work. Read more on the [homepage](https://arm-university.github.io/Arm-Developer-Labs/).
----
-**Arm Developer Labs** is a repository of software and hardware project suggestions sourced from internal Arm teams and our network of eco-system partners but framed in way to be immediately accessible and useful to both academics and professional software developers alike.
-
+---
+title: projects
+filter: project
+publication-date: 2025-05-30
+layout: article
+full_description: |-
+ **Arm Developer Labs** is a repository of software and hardware project suggestions sourced from internal Arm teams and our network of eco-system partners but framed in a way to be immediately accessible and useful to both academics and professional software developers alike.
+
+ Filter or keyword search below. You can take a project as is, adapt it to your circumstances or simply take inspiration from it. Each project link shows what contextual collateral and resources are available. To get benefits from Arm, you need to tell us about your work. Read more on the [homepage](https://arm-university.github.io/Arm-Developer-Labs/).
+---
+
+**Arm Developer Labs** is a repository of software and hardware project suggestions sourced from internal Arm teams and our network of eco-system partners but framed in a way to be immediately accessible and useful to both academics and professional software developers alike.
+
Filter or keyword search below. You can take a project as is, adapt it to your circumstances or simply take inspiration from it. Each project link shows what contextual collateral and resources are available. To get benefits from Arm, you need to tell us about your work. Read more on the [homepage](https://arm-university.github.io/Arm-Developer-Labs/).
\ No newline at end of file
diff --git a/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md b/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md
index 6b762fee..5b2dea5c 100644
--- a/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md
+++ b/docs/_posts/2025-07-11-C-Based-Application-from-Scratch.md
@@ -1,103 +1,206 @@
----
-title: Create a minimal C-Based Project for Raspberry Pi
-description: This self-service project goes back to the fundamentals. The challenge is to develop an application of your choice but your are only permitted to use the C language with as few dependencies as possible.
-subjects:
-- Performance and Architecture
-- Libraries
-requires-team:
-- No
-platform:
-- IoT
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-07-11
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
- ## Description
-
- **Why this is important?**
-
- Modern, higher-level managed languages such as Java and Python enabling developers to stand up complex applications quickly. However, this often comes at the expense of configurability and performance. Developing low-level skills are valuable to unlocking performance but also are crucial to understanding the principle mechanisms in computer architecture and how to write programs that leverage available hardware features. At Arm, we have a history of developing low-level software components, such as compilers, as well as contributing to software projects, such as the Linux Kernel. The demand for developers with these skills is high.
-
- **Project Summary**
-
- This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files. Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count.
-
- Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This is excluding those provided by the C standard library. You are also free to use any suitable compiler. If you use a difference language or a dependency written in another language, please include a short justification in your submission.
-
- Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions.
-
-
- ## Prerequisites
-
- - Access to a Raspberry Pi device (any generation)
- - Intermediate Understanding of the C language
-
-
- ## Resources from Arm and our partners
-
- - External Resource: [Getting started with your Raspberry Pi](https://www.raspberrypi.com/documentation/computers/getting-started.html).
- - External Resource: [C language documentation](https://en.cppreference.com/w/c/language.html)
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
-
- ### Previous Submissions
- 1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git).
- 2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game).
----
-## Description
-
-**Why this is important?**
-
-Modern, higher-level managed languages such as Java and Python enabling developers to stand up complex applications quickly. However, this often comes at the expense of configurability and performance. Developing low-level skills are valuable to unlocking performance but also are crucial to understanding the principle mechanisms in computer architecture and how to write programs that leverage available hardware features. At Arm, we have a history of developing low-level software components, such as compilers, as well as contributing to software projects, such as the Linux Kernel. The demand for developers with these skills is high.
-
-**Project Summary**
-
-This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files. Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count.
-
-Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This is excluding those provided by the C standard library. You are also free to use any suitable compiler. If you use a difference language or a dependency written in another language, please include a short justification in your submission.
-
-Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions.
-
-
-## Prerequisites
-
-- Access to a Raspberry Pi device (any generation)
-- Intermediate Understanding of the C language
-
-
-## Resources from Arm and our partners
-
-- External Resource: [Getting started with your Raspberry Pi](https://www.raspberrypi.com/documentation/computers/getting-started.html).
-- External Resource: [C language documentation](https://en.cppreference.com/w/c/language.html)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
-To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
-
-### Previous Submissions
-1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git).
+---
+title: C-Based-Application-from-Scratch
+description: This self-service project goes back to the fundamentals. The challenge is to develop an application of your choice, but you are only permitted to use the C language with as few dependencies as possible.
+subjects:
+- Performance and Architecture
+- Libraries
+requires-team:
+- No
+platform:
+- IoT
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-07-11
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+ ## Description
+
+ **Why is this important?**
+
+ Modern, higher-level managed languages such as Java and Python enable developers to stand up complex applications quickly. However, this often comes at the expense of configurability and performance. Developing low-level skills is valuable for unlocking performance and is also crucial to understanding the principal mechanisms in computer architecture and how to write programs that leverage available hardware features. At Arm, we have a history of developing low-level software components, such as compilers, as well as contributing to software projects, such as the Linux Kernel. The demand for developers with these skills is high.
+
+ **Project Summary**
+
+ This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files. Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count.
+
+ Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This excludes those provided by the C standard library. You are also free to use any suitable compiler. If you use a different language or a dependency written in another language, please include a short justification in your submission.
+
+ Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions.
+
+
+ ## Prerequisites
+
+ - Access to a Raspberry Pi device (any generation)
+ - Intermediate Understanding of the C language
+
+
+ ## Resources from Arm and our partners
+
+ - External Resource: [Getting started with your Raspberry Pi](https://www.raspberrypi.com/documentation/computers/getting-started.html).
+ - External Resource: [C language documentation](https://en.cppreference.com/w/c/language.html)
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+
+ ### Previous Submissions
+ 1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git).
+ 2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game).
+---
+
+## Description
+
+**Why is this important?**
+
+Modern, higher-level managed languages such as Java and Python enable developers to stand up complex applications quickly. However, this often comes at the expense of configurability and performance. Developing low-level skills is valuable for unlocking performance and is also crucial to understanding the principal mechanisms in computer architecture and how to write programs that leverage available hardware features. At Arm, we have a history of developing low-level software components, such as compilers, as well as contributing to software projects, such as the Linux Kernel. The demand for developers with these skills is high.
+
+**Project Summary**
+
+This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files. Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count.
+
+Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This excludes those provided by the C standard library. You are also free to use any suitable compiler. If you use a different language or a dependency written in another language, please include a short justification in your submission.
+
+Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions.
+
+
+## Prerequisites
+
+- Access to a Raspberry Pi device (any generation)
+- Intermediate Understanding of the C language
+
+
+## Resources from Arm and our partners
+
+- External Resource: [Getting started with your Raspberry Pi](https://www.raspberrypi.com/documentation/computers/getting-started.html).
+- External Resource: [C language documentation](https://en.cppreference.com/w/c/language.html)
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+
+### Previous Submissions
+1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git).
+---
+title: Create a minimal C-Based Project for Raspberry Pi
+description: This self-service project goes back to the fundamentals. The challenge is to develop an application of your choice, but you are only permitted to use the C language with as few dependencies as possible.
+subjects:
+- Performance and Architecture
+- Libraries
+requires-team:
+- No
+platform:
+- IoT
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-07-11
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+ ## Description
+
+ **Why is this important?**
+
+ Modern, higher-level managed languages such as Java and Python enable developers to stand up complex applications quickly. However, this often comes at the expense of configurability and performance. Developing low-level skills is valuable for unlocking performance and is also crucial to understanding the principal mechanisms in computer architecture and how to write programs that leverage available hardware features. At Arm, we have a history of developing low-level software components, such as compilers, as well as contributing to software projects, such as the Linux Kernel. The demand for developers with these skills is high.
+
+ **Project Summary**
+
+ This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files. Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count.
+
+ Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This excludes those provided by the C standard library. You are also free to use any suitable compiler. If you use a different language or a dependency written in another language, please include a short justification in your submission.
+
+ Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions.
+
+
+ ## Prerequisites
+
+ - Access to a Raspberry Pi device (any generation)
+ - Intermediate Understanding of the C language
+
+
+ ## Resources from Arm and our partners
+
+ - External Resource: [Getting started with your Raspberry Pi](https://www.raspberrypi.com/documentation/computers/getting-started.html).
+ - External Resource: [C language documentation](https://en.cppreference.com/w/c/language.html)
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+
+ ### Previous Submissions
+ 1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git).
+ 2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game).
+---
+## Description
+
+**Why is this important?**
+
+Modern, higher-level managed languages such as Java and Python enable developers to stand up complex applications quickly. However, this often comes at the expense of configurability and performance. Developing low-level skills is valuable for unlocking performance and is also crucial to understanding the principal mechanisms in computer architecture and how to write programs that leverage available hardware features. At Arm, we have a history of developing low-level software components, such as compilers, as well as contributing to software projects, such as the Linux Kernel. The demand for developers with these skills is high.
+
+**Project Summary**
+
+This project asks you to develop an application of your choice that runs on any generation of Raspberry Pi device. The majority of implementation code must be written in C (any ISO-standard version), and we will measure your submission by counting only the lines of C source files. Auxiliary files, such as Makefiles, JSON configuration files, etc., are excluded from this line count.
+
+Further, we recommend that you keep the number of external dependencies to a minimum, writing any libraries from scratch where suitable. This excludes those provided by the C standard library. You are also free to use any suitable compiler. If you use a different language or a dependency written in another language, please include a short justification in your submission.
+
+Be creative! This challenge is open ended and we are looking for submissions that show creativity and novel solutions.
+
+
+## Prerequisites
+
+- Access to a Raspberry Pi device (any generation)
+- Intermediate Understanding of the C language
+
+
+## Resources from Arm and our partners
+
+- External Resource: [Getting started with your Raspberry Pi](https://www.raspberrypi.com/documentation/computers/getting-started.html).
+- External Resource: [C language documentation](https://en.cppreference.com/w/c/language.html)
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
+
+To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+
+### Previous Submissions
+1. [Plant Health Analysis System, Arnav Gupta et al. Imperial College London](https://github.com/Arg2006/ARM_Presentation.git).
2. [VR Voxel Game. Imperial College London](https://github.com/lxkast/vr-voxel-game).
\ No newline at end of file
diff --git a/docs/_posts/2025-08-28-NPC-LLM-Runtime.md b/docs/_posts/2025-08-28-NPC-LLM-Runtime.md
index 73f72440..0cc26757 100644
--- a/docs/_posts/2025-08-28-NPC-LLM-Runtime.md
+++ b/docs/_posts/2025-08-28-NPC-LLM-Runtime.md
@@ -1,119 +1,123 @@
----
-title: On-Device LLMs for Real-Time NPC Interaction in Games
-description: This self-service project explores novel ways of integrating Large Language Models (LLMs) into real-time gameplay to drive dynamic Non-Playable Character (NPC) interactions.
-subjects:
-- ML
-- Gaming
-- Graphics
-requires-team:
-- No
-platform:
-- AI
-- Mobile, Graphics, and Gaming
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-08-28
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
- ## Description
-
- **Why is this important?**
-
- Large Language Models (LLMs) are redefining the possibilities for interactive game experiences, especially in how players engage with Non-Playable Characters (NPCs). Traditional NPCs rely on pre-scripted dialogue trees and behavior systems, which limit immersion and adaptability. By leveraging LLMs locally, we can move beyond static interactions and create dynamic, real-time NPC behaviors that respond intelligently to players. This not only enhances immersion but also demonstrates the feasibility of running advanced AI directly on-device without reliance on cloud infrastructure. Such innovation could shape the future of gaming, particularly in mobile platforms where performance and autonomy are critical.
-
- **Project Summary**
-
- This project challenges you to propose and implement novel methods of using LLMs to control or interact with NPCs during runtime. Beyond dialogue systems, the focus is on new, creative mechanisms that showcase the potential of LLM-driven NPCs in real-time gameplay. Your implementation should demonstrate a working prototype running locally at a stable 30–60 FPS, ensuring both responsiveness and playability. While achieving mobile deployment is the ultimate goal, a PC/laptop demo will also be accepted as proof of concept. The final submission should include source code, reproducible build instructions, supporting documentation, and a short demo video.
-
- To qualify, your submission should include, where possible:
-
- - Source code (with clear documentation and build instructions)
- - A reproducible setup (e.g. scripts, datasets, or dependencies)
- - A supporting document describing the project and design decisions
- - High-quality images and a video (≤ 3 minutes) demonstrating the demo in action
-
- Please ensure your contribution contains no confidential material and that you have rights to share it, especially if affiliated with an academic or commercial institution.
-
+ ## Prerequisites
- - Familiarity with a modern game engine (e.g., Unity, Unreal Engine, Godot)
- - Experience with integrating machine learning models into real-time applications
- - Knowledge of C++, Python, or a game scripting language
- - Understanding of on-device ML optimization (e.g., quantization, pruning, mobile deployment)
- - Access to hardware capable of running LLM inference locally (PC or mobile)
-
-
- ## Resources from Arm and our partners
-
- - Blog: [The Future of Interaction with Mobile Game Characters](https://dl.acm.org/doi/10.1145/3641234.3671019)
- - Blog: [The Future is Here: Verbal Interaction with NPCs on Mobile](https://doi.org/10.1145/3664294.3664365)
- - Blog: [Google AI for game developers- Google Developers Blog](https://developers.googleblog.com/en/google-ai-for-game-developers/)
- - Webinar: [Generative AI in Game Development](https://meet95924766.adobeconnect.com/pkevz158x6tp/)
- - Paper: [A Survey on Large Language Model-Based Game Agents ](https://arxiv.org/abs/2404.02039)
- - Paper: [Large Language Models and Games: A Survey and Roadmap](https://arxiv.org/abs/2402.18659)
-
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-## Description
-
-**Why is this important?**
-
-Large Language Models (LLMs) are redefining the possibilities for interactive game experiences, especially in how players engage with Non-Playable Characters (NPCs). Traditional NPCs rely on pre-scripted dialogue trees and behavior systems, which limit immersion and adaptability. By leveraging LLMs locally, we can move beyond static interactions and create dynamic, real-time NPC behaviors that respond intelligently to players. This not only enhances immersion but also demonstrates the feasibility of running advanced AI directly on-device without reliance on cloud infrastructure. Such innovation could shape the future of gaming, particularly in mobile platforms where performance and autonomy are critical.
-
-**Project Summary**
-
-This project challenges you to propose and implement novel methods of using LLMs to control or interact with NPCs during runtime. Beyond dialogue systems, the focus is on new, creative mechanisms that showcase the potential of LLM-driven NPCs in real-time gameplay. Your implementation should demonstrate a working prototype running locally at a stable 30–60 FPS, ensuring both responsiveness and playability. While achieving mobile deployment is the ultimate goal, a PC/laptop demo will also be accepted as proof of concept. The final submission should include source code, reproducible build instructions, supporting documentation, and a short demo video.
-
-To qualify, your submission should include, where possible:
-
-- Source code (with clear documentation and build instructions)
-- A reproducible setup (e.g. scripts, datasets, or dependencies)
-- A supporting document describing the project and design decisions
-- High-quality images and a video (≤ 3 minutes) demonstrating the demo in action
-
-Please ensure your contribution contains no confidential material and that you have rights to share it, especially if affiliated with an academic or commercial institution.
-
-## Prequisites
-- Familiarity with a modern game engine (e.g., Unity, Unreal Engine, Godot)
-- Experience with integrating machine learning models into real-time applications
-- Knowledge of C++, Python, or a game scripting language
-- Understanding of on-device ML optimization (e.g., quantization, pruning, mobile deployment)
-- Access to hardware capable of running LLM inference locally (PC or mobile)
-
-
-## Resources from Arm and our partners
-
-- Blog: [The Future of Interaction with Mobile Game Characters](https://dl.acm.org/doi/10.1145/3641234.3671019)
-- Blog: [The Future is Here: Verbal Interaction with NPCs on Mobile](https://doi.org/10.1145/3664294.3664365)
-- Blog: [Google AI for game developers- Google Developers Blog](https://developers.googleblog.com/en/google-ai-for-game-developers/)
-- Webinar: [Generative AI in Game Development](https://meet95924766.adobeconnect.com/pkevz158x6tp/)
-- Paper: [A Survey on Large Language Model-Based Game Agents ](https://arxiv.org/abs/2404.02039)
-- Paper: [Large Language Models and Games: A Survey and Roadmap](https://arxiv.org/abs/2402.18659)
-
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: NPC-LLM-Runtime
+description: This self-service project explores novel ways of integrating Large Language Models (LLMs) into real-time gameplay to drive dynamic Non-Playable Character (NPC) interactions.
+subjects:
+- ML
+- Gaming
+- Graphics
+requires-team:
+- No
+platform:
+- AI
+- Mobile, Graphics, and Gaming
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-08-28
+license:
+status:
+- Published
+badges: trending
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+ ## Description
+
+ **Why is this important?**
+
+ Large Language Models (LLMs) are redefining the possibilities for interactive game experiences, especially in how players engage with Non-Playable Characters (NPCs). Traditional NPCs rely on pre-scripted dialogue trees and behavior systems, which limit immersion and adaptability. By leveraging LLMs locally, we can move beyond static interactions and create dynamic, real-time NPC behaviors that respond intelligently to players. This not only enhances immersion but also demonstrates the feasibility of running advanced AI directly on-device without reliance on cloud infrastructure. Such innovation could shape the future of gaming, particularly in mobile platforms where performance and autonomy are critical.
+
+ **Project Summary**
+
+ This project challenges you to propose and implement novel methods of using LLMs to control or interact with NPCs during runtime. Beyond dialogue systems, the focus is on new, creative mechanisms that showcase the potential of LLM-driven NPCs in real-time gameplay. Your implementation should demonstrate a working prototype running locally at a stable 30–60 FPS, ensuring both responsiveness and playability. While achieving mobile deployment is the ultimate goal, a PC/laptop demo will also be accepted as proof of concept. The final submission should include source code, reproducible build instructions, supporting documentation, and a short demo video.
+
+ To qualify, your submission should include, where possible:
+
+ - Source code (with clear documentation and build instructions)
+ - A reproducible setup (e.g. scripts, datasets, or dependencies)
+ - A supporting document describing the project and design decisions
+ - High-quality images and a video (≤ 3 minutes) demonstrating the demo in action
+
+ Please ensure your contribution contains no confidential material and that you have rights to share it, especially if affiliated with an academic or commercial institution.
+
+ ## Prerequisites
+ - Familiarity with a modern game engine (e.g., Unity, Unreal Engine, Godot)
+ - Experience with integrating machine learning models into real-time applications
+ - Knowledge of C++, Python, or a game scripting language
+ - Understanding of on-device ML optimization (e.g., quantization, pruning, mobile deployment)
+ - Access to hardware capable of running LLM inference locally (PC or mobile)
+
+
+ ## Resources from Arm and our partners
+
+ - Blog: [The Future of Interaction with Mobile Game Characters](https://dl.acm.org/doi/10.1145/3641234.3671019)
+ - Blog: [The Future is Here: Verbal Interaction with NPCs on Mobile](https://doi.org/10.1145/3664294.3664365)
+ - Blog: [Google AI for game developers- Google Developers Blog](https://developers.googleblog.com/en/google-ai-for-game-developers/)
+ - Webinar: [Generative AI in Game Development](https://meet95924766.adobeconnect.com/pkevz158x6tp/)
+ - Paper: [A Survey on Large Language Model-Based Game Agents ](https://arxiv.org/abs/2404.02039)
+ - Paper: [Large Language Models and Games: A Survey and Roadmap](https://arxiv.org/abs/2402.18659)
+
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+## Description
+
+**Why is this important?**
+
+Large Language Models (LLMs) are redefining the possibilities for interactive game experiences, especially in how players engage with Non-Playable Characters (NPCs). Traditional NPCs rely on pre-scripted dialogue trees and behavior systems, which limit immersion and adaptability. By leveraging LLMs locally, we can move beyond static interactions and create dynamic, real-time NPC behaviors that respond intelligently to players. This not only enhances immersion but also demonstrates the feasibility of running advanced AI directly on-device without reliance on cloud infrastructure. Such innovation could shape the future of gaming, particularly in mobile platforms where performance and autonomy are critical.
+
+**Project Summary**
+
+This project challenges you to propose and implement novel methods of using LLMs to control or interact with NPCs during runtime. Beyond dialogue systems, the focus is on new, creative mechanisms that showcase the potential of LLM-driven NPCs in real-time gameplay. Your implementation should demonstrate a working prototype running locally at a stable 30–60 FPS, ensuring both responsiveness and playability. While achieving mobile deployment is the ultimate goal, a PC/laptop demo will also be accepted as proof of concept. The final submission should include source code, reproducible build instructions, supporting documentation, and a short demo video.
+
+To qualify, your submission should include, where possible:
+
+- Source code (with clear documentation and build instructions)
+- A reproducible setup (e.g. scripts, datasets, or dependencies)
+- A supporting document describing the project and design decisions
+- High-quality images and a video (≤ 3 minutes) demonstrating the demo in action
+
+Please ensure your contribution contains no confidential material and that you have rights to share it, especially if affiliated with an academic or commercial institution.
+
+## Prerequisites
+- Familiarity with a modern game engine (e.g., Unity, Unreal Engine, Godot)
+- Experience with integrating machine learning models into real-time applications
+- Knowledge of C++, Python, or a game scripting language
+- Understanding of on-device ML optimization (e.g., quantization, pruning, mobile deployment)
+- Access to hardware capable of running LLM inference locally (PC or mobile)
+
+
+## Resources from Arm and our partners
+
+- Blog: [The Future of Interaction with Mobile Game Characters](https://dl.acm.org/doi/10.1145/3641234.3671019)
+- Blog: [The Future is Here: Verbal Interaction with NPCs on Mobile](https://doi.org/10.1145/3664294.3664365)
+- Blog: [Google AI for game developers- Google Developers Blog](https://developers.googleblog.com/en/google-ai-for-game-developers/)
+- Webinar: [Generative AI in Game Development](https://meet95924766.adobeconnect.com/pkevz158x6tp/)
+- Paper: [A Survey on Large Language Model-Based Game Agents ](https://arxiv.org/abs/2404.02039)
+- Paper: [Large Language Models and Games: A Survey and Roadmap](https://arxiv.org/abs/2402.18659)
+
+
+## Support Level
+
+This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-11-03-Python-Porting-Challenge.md b/docs/_posts/2025-11-03-Python-Porting-Challenge.md
index 0e922cf0..6cae97b5 100644
--- a/docs/_posts/2025-11-03-Python-Porting-Challenge.md
+++ b/docs/_posts/2025-11-03-Python-Porting-Challenge.md
@@ -1,118 +1,119 @@
----
-title: Python Package Porting Challenge
-description: This challenge focuses on enabling Python support for Windows on Arm (WoA) to improve developer experience. While Python is widely used in research and industry, many popular packages—such as Pandas—still lack pre-built WoA binaries (win_arm64 wheels). The goal is to validate and optimise third-party packages, fix compatibility issues, and collaborate with maintainers to upstream WoA support.
-subjects:
-- Libraries
-requires-team:
-- No
-platform:
-- Laptops and Desktops
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-11-03
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
-
-
- ### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility*
-
- ## Description
-
- Windows on Arm brings the power of Arm architecture to Windows users, delivering energy efficiency and performance for a wide range of devices. As adoption grows, ensuring a seamless developer experience is critical, especially for Python, one of the most widely used languages in research, education, and industry.
-
- This challenge is on advancing the Arm ecosystem by addressing critical gaps in software enablement. A key objective is enabling Windows on Arm (WoA) across the WoA Python package ecosystem, which involves validating and optimising 3rd party packages to ensure seamless functionality across diverse environments.
-
- Despite Python’s widespread adoption, many popular packages such as Pandas still lack pre-built Windows on Arm binaries (`win_arm64` wheels), creating a barrier for developers and researchers who rely on these tools for data analysis and scientific computing. If a Windows on Arm Python developer wanted to use Pandas, they would have to recompile from source, which required the correct toolchain and is not guaranteed to compile or run successfully.
-
- Key Objectives:
-
- - Python Ecosystem Enablement: Turn at least 5 amber projects green on [Windows Arm64 Wheels](https://tonybaloney.github.io/windows-arm64-wheels/).
- - Identify packages that do not have readily available `win_arm64` wheels. Identify any bugs or regressions when porting to application (for example, `x86` instrinsics) and create a patch that resolves issues and enables the packages to correctly build and run performantly.
- - Community Collaboration: Engage with global developer communities, such as Python package maintainers, to get WoA package support upstreamed and integrated.
-
-
- ## Prequisites
-
- - Intermediate to advance understanding of the Python language
- - Some experience on creating python packages and continuous integration testing.
- - If you decide to tackle non pure-python packages that are written in other languages, you will need an intermediate understanding of the language the program was written in (e.g., Rust, Java, C++ etc.).
-
- ## Resources from Arm and our partners
-
- - External Documentation: [Methods to run Windows on Arm64](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
- - External Documentation: [Python Packages without Windows-on-Arm Wheels](https://tonybaloney.github.io/windows-arm64-wheels/)
- - External Documentation: [Python Packages without Windows-on-Arm Wheels additional resource](http://www.winarm64wheels.com/)
- - Learning Path: [Sampling CPython with WindowsPerf](https://learn.arm.com/learning-paths/laptops-and-desktops/windowsperf_sampling_cpython/)
- - Community Post: [Python on Windows Arm64](https://discuss.python.org/t/python-on-windows-arm64/104524)
- - External Documentation: [Status of Python versions](https://devguide.python.org/versions/)
- - GitHub Repository: [Source code and documentation to build Python Wheels](https://github.com/pypa/cibuildwheel)
-
-
- ## Support Level
-
- If you would like to request a small donation to help procure Windows on Arm hardware, please reach out to us at Arm-Developer-Lab@arm.com.
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
- To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility*
-
-## Description
-
-Windows on Arm brings the power of Arm architecture to Windows users, delivering energy efficiency and performance for a wide range of devices. As adoption grows, ensuring a seamless developer experience is critical, especially for Python, one of the most widely used languages in research, education, and industry.
-
-This challenge is on advancing the Arm ecosystem by addressing critical gaps in software enablement. A key objective is enabling Windows on Arm (WoA) across the WoA Python package ecosystem, which involves validating and optimising 3rd party packages to ensure seamless functionality across diverse environments.
-
-Despite Python’s widespread adoption, many popular packages such as Pandas still lack pre-built Windows on Arm binaries (`win_arm64` wheels), creating a barrier for developers and researchers who rely on these tools for data analysis and scientific computing. If a Windows on Arm Python developer wanted to use Pandas, they would have to recompile from source, which required the correct toolchain and is not guaranteed to compile or run successfully.
-
-Key Objectives:
-
-- Python Ecosystem Enablement: Turn at least 5 amber projects green on [Windows Arm64 Wheels](https://tonybaloney.github.io/windows-arm64-wheels/).
-- Identify packages that do not have readily available `win_arm64` wheels. Identify any bugs or regressions when porting to application (for example, `x86` instrinsics) and create a patch that resolves issues and enables the packages to correctly build and run performantly.
-- Community Collaboration: Engage with global developer communities, such as Python package maintainers, to get WoA package support upstreamed and integrated.
-
-
-## Prequisites
-
-- Intermediate to advance understanding of the Python language
-- Some experience on creating python packages and continuous integration testing.
-- If you decide to tackle non pure-python packages that are written in other languages, you will need an intermediate understanding of the language the program was written in (e.g., Rust, Java, C++ etc.).
-
-## Resources from Arm and our partners
-
-- External Documentation: [Methods to run Windows on Arm64](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
-- External Documentation: [Python Packages without Windows-on-Arm Wheels](https://tonybaloney.github.io/windows-arm64-wheels/)
-- External Documentation: [Python Packages without Windows-on-Arm Wheels additional resource](http://www.winarm64wheels.com/)
-- Learning Path: [Sampling CPython with WindowsPerf](https://learn.arm.com/learning-paths/laptops-and-desktops/windowsperf_sampling_cpython/)
-- Community Post: [Python on Windows Arm64](https://discuss.python.org/t/python-on-windows-arm64/104524)
-- External Documentation: [Status of Python versions](https://devguide.python.org/versions/)
-- GitHub Repository: [Source code and documentation to build Python Wheels](https://github.com/pypa/cibuildwheel)
-
-
-## Support Level
-
-If you would like to request a small donation to help procure Windows on Arm hardware, please reach out to us at Arm-Developer-Lab@arm.com.
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in preferential internal referrals to Arm Talent Acquisition (with digital badges for CV building). And we are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
+---
+title: Python-Porting-Challenge
+description: This challenge focuses on enabling Python support for Windows on Arm (WoA) to improve developer experience. While Python is widely used in research and industry, many popular packages—such as Pandas—still lack pre-built WoA binaries (win_arm64 wheels). The goal is to validate and optimise third-party packages, fix compatibility issues, and collaborate with maintainers to upstream WoA support.
+subjects:
+- Libraries
+requires-team:
+- No
+platform:
+- Laptops and Desktops
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-11-03
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+ ### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility*
+
+ ## Description
+
+ Windows on Arm brings the power of Arm architecture to Windows users, delivering energy efficiency and performance for a wide range of devices. As adoption grows, ensuring a seamless developer experience is critical, especially for Python, one of the most widely used languages in research, education, and industry.
+
+ This challenge focuses on advancing the Arm ecosystem by addressing critical gaps in software enablement. A key objective is enabling Windows on Arm (WoA) across the WoA Python package ecosystem, which involves validating and optimising 3rd party packages to ensure seamless functionality across diverse environments.
+
+ Despite Python’s widespread adoption, many popular packages such as Pandas still lack pre-built Windows on Arm binaries (`win_arm64` wheels), creating a barrier for developers and researchers who rely on these tools for data analysis and scientific computing. If a Windows on Arm Python developer wanted to use Pandas, they would have to recompile from source, which requires the correct toolchain and is not guaranteed to compile or run successfully.
+
+ Key Objectives:
+
+ - Python Ecosystem Enablement: Turn at least 5 amber projects green on [Windows Arm64 Wheels](https://tonybaloney.github.io/windows-arm64-wheels/).
+ - Identify packages that do not have readily available `win_arm64` wheels. Identify any bugs or regressions when porting the application (for example, `x86` intrinsics) and create a patch that resolves issues and enables the packages to correctly build and run performantly.
+ - Community Collaboration: Engage with global developer communities, such as Python package maintainers, to get WoA package support upstreamed and integrated.
+
+
+ ## Prerequisites
+
+ - Intermediate to advanced understanding of the Python language
+ - Some experience in creating Python packages and continuous integration testing.
+ - If you decide to tackle non pure-python packages that are written in other languages, you will need an intermediate understanding of the language the program was written in (e.g., Rust, Java, C++ etc.).
+
+ ## Resources from Arm and our partners
+
+ - External Documentation: [Methods to run Windows on Arm64](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
+ - External Documentation: [Python Packages without Windows-on-Arm Wheels](https://tonybaloney.github.io/windows-arm64-wheels/)
+ - External Documentation: [Python Packages without Windows-on-Arm Wheels additional resource](http://www.winarm64wheels.com/)
+ - Learning Path: [Sampling CPython with WindowsPerf](https://learn.arm.com/learning-paths/laptops-and-desktops/windowsperf_sampling_cpython/)
+ - Community Post: [Python on Windows Arm64](https://discuss.python.org/t/python-on-windows-arm64/104524)
+ - External Documentation: [Status of Python versions](https://devguide.python.org/versions/)
+ - GitHub Repository: [Source code and documentation to build Python Wheels](https://github.com/pypa/cibuildwheel)
+
+
+ ## Support Level
+
+ If you would like to request a small donation to help procure Windows on Arm hardware, please reach out to us at Arm-Developer-Lab@arm.com.
+
+ This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+ To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+### *We are open to supporting participants through hardware donations, such as gift cards to help procure Windows on Arm laptops for development and research purposes. Please reach out to us at Arm-Developer-Labs@arm.com for more details on eligibility*
+
+## Description
+
+Windows on Arm brings the power of Arm architecture to Windows users, delivering energy efficiency and performance for a wide range of devices. As adoption grows, ensuring a seamless developer experience is critical, especially for Python, one of the most widely used languages in research, education, and industry.
+
+This challenge focuses on advancing the Arm ecosystem by addressing critical gaps in software enablement. A key objective is enabling Windows on Arm (WoA) across the WoA Python package ecosystem, which involves validating and optimising 3rd party packages to ensure seamless functionality across diverse environments.
+
+Despite Python’s widespread adoption, many popular packages such as Pandas still lack pre-built Windows on Arm binaries (`win_arm64` wheels), creating a barrier for developers and researchers who rely on these tools for data analysis and scientific computing. If a Windows on Arm Python developer wanted to use Pandas, they would have to recompile from source, which requires the correct toolchain and is not guaranteed to compile or run successfully.
+
+Key Objectives:
+
+- Python Ecosystem Enablement: Turn at least 5 amber projects green on [Windows Arm64 Wheels](https://tonybaloney.github.io/windows-arm64-wheels/).
+- Identify packages that do not have readily available `win_arm64` wheels. Identify any bugs or regressions when porting the application (for example, `x86` intrinsics) and create a patch that resolves issues and enables the packages to correctly build and run performantly.
+- Community Collaboration: Engage with global developer communities, such as Python package maintainers, to get WoA package support upstreamed and integrated.
+
+
+## Prerequisites
+
+- Intermediate to advanced understanding of the Python language
+- Some experience in creating Python packages and continuous integration testing.
+- If you decide to tackle non pure-python packages that are written in other languages, you will need an intermediate understanding of the language the program was written in (e.g., Rust, Java, C++ etc.).
+
+## Resources from Arm and our partners
+
+- External Documentation: [Methods to run Windows on Arm64](https://linaro.atlassian.net/wiki/spaces/WOAR/pages/29005479987/Windows+on+Arm+Environments)
+- External Documentation: [Python Packages without Windows-on-Arm Wheels](https://tonybaloney.github.io/windows-arm64-wheels/)
+- External Documentation: [Python Packages without Windows-on-Arm Wheels additional resource](http://www.winarm64wheels.com/)
+- Learning Path: [Sampling CPython with WindowsPerf](https://learn.arm.com/learning-paths/laptops-and-desktops/windowsperf_sampling_cpython/)
+- Community Post: [Python on Windows Arm64](https://discuss.python.org/t/python-on-windows-arm64/104524)
+- External Documentation: [Status of Python versions](https://devguide.python.org/versions/)
+- GitHub Repository: [Source code and documentation to build Python Wheels](https://github.com/pypa/cibuildwheel)
+
+
+## Support Level
+
+If you would like to request a small donation to help procure Windows on Arm hardware, please reach out to us at Arm-Developer-Lab@arm.com.
+
+This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
To receive the benefits, you must share your contribution through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md b/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md
index f236fec1..512a3d63 100644
--- a/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md
+++ b/docs/_posts/2025-11-27-Always-On-AI-with-Ethos-U85-NPU.md
@@ -1,136 +1,137 @@
----
-title: 'Edge AI with NPU: always-on-AI with ExecuTorch on Cortex-M55 + Ethos-U85 → Cortex-A'
-description: The vision of Edge AI compute is to embed low-power intelligent sensing, perception, and decision systems everywhere. A low-power always-on-AI island continuously monitors sensory inputs to detect triggers. When a trigger is detected, it wakes up a more capable processor to carry out high-value inference, interaction, or control tasks.
-subjects:
-- ML
-- Performance and Architecture
-- Embedded Linux
-- RTOS Fundamentals
-requires-team:
-- No
-platform:
-- IoT
-- Embedded and Microcontrollers
-- AI
-sw-hw:
-- Software
-- Hardware
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-11-27
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
-
-
-
- ## Description
-
- **Why is this important?**
-
- The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy:
- - A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies.
- - When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance.
-
- This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources.
-
- **Project Summary**
-
- Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements:
-
- 1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers.
- 2. A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference.
- 3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring).
-
- *Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. For LLM inference, consider connecting a Raspberry Pi 5 or similar.*
-
- Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices.
-
- You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved.
-
- Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve?
-
- ## What will you use?
- You should either be familiar with, or willing to learn about, the following:
- - Programming: Python, C++, Embedded C
- - ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA.
- - Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. Yocto) or Raspberry Pi OS
-
-
- ## Resources from Arm and our partners
- - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai)
- - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/)
- - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
- - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/)
- - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/)
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-
-## Description
-
-**Why is this important?**
-
-The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy:
-- A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies.
-- When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance.
-
-This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources.
-
-**Project Summary**
-
-Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements:
-
-1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers.
-2. A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference.
-3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring).
-
-*Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. For LLM inference, consider connecting a Raspberry Pi 5 or similar.*
-
-Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices.
-
-You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved.
-
-Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve?
-
-## What will you use?
-You should either be familiar with, or willing to learn about, the following:
-- Programming: Python, C++, Embedded C
-- ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA.
-- Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. Yocto) or Raspberry Pi OS
-
-
-## Resources from Arm and our partners
-- Arm Developer: [Edge AI](https://developer.arm.com/edge-ai)
-- Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/)
-- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
-- Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/)
-- PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/)
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
-
+---
+title: Always-On-AI-with-Ethos-U85-NPU
+description: The vision of Edge AI compute is to embed low-power intelligent sensing, perception, and decision systems everywhere. A low-power always-on-AI island continuously monitors sensory inputs to detect triggers. When a trigger is detected, it wakes up a more capable processor to carry out high-value inference, interaction, or control tasks.
+subjects:
+- ML
+- Performance and Architecture
+- Embedded Linux
+- RTOS Fundamentals
+requires-team:
+- No
+platform:
+- IoT
+- Embedded and Microcontrollers
+- AI
+sw-hw:
+- Software
+- Hardware
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-11-27
+license:
+status:
+- Published
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+
+ ## Description
+
+ **Why is this important?**
+
+ The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy:
+ - A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies.
+ - When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance.
+
+ This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources.
+
+ **Project Summary**
+
+ Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements:
+
+ 1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers.
+ 2. A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference.
+ 3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring).
+
+ *Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. For LLM inference, consider connecting a Raspberry Pi 5 or similar.*
+
+ Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices.
+
+ You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved.
+
+ Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve?
+
+ ## What will you use?
+ You should either be familiar with, or willing to learn about, the following:
+ - Programming: Python, C++, Embedded C
+ - ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA.
+ - Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. Yocto) or Raspberry Pi OS
+
+
+ ## Resources from Arm and our partners
+ - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai)
+ - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/)
+ - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
+ - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/)
+ - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/)
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally, if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+
+## Description
+
+**Why is this important?**
+
+The vision of Edge AI compute is to embed intelligent low-power sensing, perception, and decision systems everywhere (in homes, wearables, infrastructure) so devices can react to subtle cues, adapt to context, and wake higher-power systems only when needed. Rather than sending everything to the cloud or running full-scale models continuously, this Edge AI system operates as a layered hierarchy:
+- A low-power always-on-AI model continuously monitors sensory inputs (audio, motion, video) to detect triggers or anomalies.
+- When a trigger is detected, it wakes up a more capable processor (e.g. Cortex-A running a rich OS such as Linux) to carry out further tasks. This could be high-value inference, interaction, or control tasks. It could also involve connecting to other IoT devices or to a Neoverse cloud instance.
+
+This architecture is key to bridging the gap between battery-constrained devices and rich AI services, making systems smarter, more efficient, and responsive without draining resources.
+
+**Project Summary**
+
+Using equipment such as the Alif Ensemble development kit (e.g. E6/E8, which includes Cortex-A, Cortex-M55, and Ethos-U85 cores - or E4 (M55+U85) + Raspberry Pi for Cortex-A), and the ExecuTorch framework, build an Edge AI prototype that implements:
+
+1. A “wake-up” path: deploy a TOSA-compliant optimized model on the Cortex-M55 + Ethos-U85 pair to continuously monitor sensory signals (audio, motion, video) for wake-word, anomalies, or triggers.
+2. A subsequent workload path: when a trigger is detected, activate a Cortex-A core to perform more complex tasks, e.g. use an LLM optimised for CPU inference, connect to and manage other IoT devices, or connect to a Neoverse cloud instance for heavier inference.
+3. Evaluation and documentation: measure accuracy, latency, power consumption, robustness, and compare trade-offs between modalities (audio, video, motion). Demonstrate an end-to-end use case of your choice (e.g. smart assistant, anomaly alert system, gesture control, environment monitoring).
+
+*Note that the Cortex-A32 included on the Alif DevKits will not be suitable for LLM inference. If using the onboard core for the project, target cloud/IoT connectivity. For LLM inference, consider connecting a Raspberry Pi 5 or similar.*
+
+Example: Use a microphone input to detect “Hey Arm”. After wake-up, launch an optimised LLM on Raspberry Pi Cortex-A to answer questions or control local devices.
+
+You are free to mix and match sensors, modalities, and tasks — as long as the core architecture (wake-on M55/U85, main task on A) is preserved.
+
+Many of these DevKits come with additional Ethos-U55 NPUs onboard - feel free to be creative and distribute different tasks across the different NPUs - what use-cases and applications can you achieve?
+
+## What will you use?
+You should either be familiar with, or willing to learn about, the following:
+- Programming: Python, C++, Embedded C
+- ExecuTorch, plus knowledge of model quantization, pruning, conversion. Use of the Vela compiler and TOSA.
+- Edge/Embedded development: bare-metal or RTOS (e.g. Zephyr), and embedded Linux (e.g. Yocto) or Raspberry Pi OS
+
+
+## Resources from Arm and our partners
+- Arm Developer: [Edge AI](https://developer.arm.com/edge-ai)
+- Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/)
+- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
+- Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/)
+- PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/)
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md b/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md
index 86e72c22..b7640d8a 100644
--- a/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md
+++ b/docs/_posts/2025-11-27-Edge-AI-On-Mobile.md
@@ -1,131 +1,131 @@
----
-title: 'SME2 on vivo X300: Mobile Edge AI Projects for multi-modal inference, built on Arm Lumex'
-description: Leverage the latest SME2 (Scalable Matrix Extension 2) available on the newest vivo X300 smartphones (built on Arm Lumex CSS) for advanced image/video, audio and text processing edge AI. Explore how SME2, via KleidiAI, enables larger matrix workloads, higher throughput, and novel applications on device without cloud connectivity required.
-subjects:
-- ML
-- Performance and Architecture
-- Libraries
-requires-team:
-- No
-platform:
-- Mobile, Graphics, and Gaming
-- AI
-- IoT
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-11-27
-license:
-status:
-- Published
-layout: article
-sidebar:
- nav: projects
-full_description: |-
- ## Description
-
- ### Why is this important?
-
- SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device.
-
- [SME2](https://www.arm.com/technologies/sme2)
-
- The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases.
-
- [vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones)
-
- ### Project Summary
-
- Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2.
-
- Example project areas:
- - Real-time video semantic segmentation (e.g., background removal + AR compositing)
- - Live object detection + natural-language description (text summary of what the camera sees)
- - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition
- - On-device lightweight LLM or encoder-only transformer processing for mobile assistants
-
- Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case.
-
- Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2.
-
- ---
-
- ## Resources from Arm and our partners
-
- - Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile)
- - Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/)
- - Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/)
- - Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/)
- - Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/)
- - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
- - Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm)
-
- ---
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
-
- ---
----
-## Description
-
-### Why is this important?
-
-SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device.
-
-[SME2](https://www.arm.com/technologies/sme2)
-
-The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases.
-
-[vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones)
-
-### Project Summary
-
-Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2.
-
-Example project areas:
- - Real-time video semantic segmentation (e.g., background removal + AR compositing)
- - Live object detection + natural-language description (text summary of what the camera sees)
- - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition
- - On-device lightweight LLM or encoder-only transformer processing for mobile assistants
-
-Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case.
-
-Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2.
-
----
-
-## Resources from Arm and our partners
-
-- Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile)
-- Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/)
-- Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/)
-- Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/)
-- Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/)
-- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
-- Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm)
-
----
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
-
-To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
-
+---
+title: Edge-AI-On-Mobile
+description: Leverage the latest SME2 (Scalable Matrix Extension 2) available on the newest vivo X300 smartphones (built on Arm Lumex CSS) for advanced image/video, audio and text processing edge AI. Explore how SME2, via KleidiAI, enables larger matrix workloads, higher throughput, and novel applications on device without cloud connectivity required.
+subjects:
+- ML
+- Performance and Architecture
+- Libraries
+requires-team:
+- No
+platform:
+- Mobile, Graphics, and Gaming
+- AI
+- IoT
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-11-27
+license:
+status:
+- Published
+badges: trending
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+ ## Description
+
+ ### Why is this important?
+
+ SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device.
+
+ [SME2](https://www.arm.com/technologies/sme2)
+
+ The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases.
+
+ [vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones)
+
+ ### Project Summary
+
+ Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2.
+
+ Example project areas:
+ - Real-time video semantic segmentation (e.g., background removal + AR compositing)
+ - Live object detection + natural-language description (text summary of what the camera sees)
+ - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition
+ - On-device lightweight LLM or encoder-only transformer processing for mobile assistants
+
+ Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case.
+
+ Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2.
+
+ ---
+
+ ## Resources from Arm and our partners
+
+ - Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile)
+ - Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/)
+ - Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/)
+ - Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/)
+ - Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/)
+ - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
+ - Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm)
+
+ ---
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally, if you are affiliated with an academic institution, please ensure you have the right to share your material.
+
+ ---
+---
+
+## Description
+
+### Why is this important?
+
+SME2 (Scalable Matrix Extension 2) is the latest CPU extension on Arm Lumex CSS. Designed to accelerate matrix-oriented compute workloads directly on device, SME2 improves AI/ML performance. This is by accelerating models that rely on operations like matrix multiplication, common in transformers, convolutional neural networks (CNNs), and large language models (LLMs). Via KleidiAI, SME2 is seamlessly integrated into frameworks such as ExecuTorch, LiteRT, ONNX Runtime so it is automatically leveraged for applications depending on whether SME2 is present on the host device.
+
+[SME2](https://www.arm.com/technologies/sme2)
+
+The vivo X300 is built on Arm Lumex. SME2 now enables AI compute that previously was too heavy or inaccessible on mobile. Developers can now utilise these advancements to deliver advanced applications on-device, reducing latency, increasing data privacy, and unlocking novel use-cases.
+
+[vivo X300, built on Arm Lumex](https://www.arm.com/company/success-library/vivo-x300-smartphones)
+
+### Project Summary
+
+Select a **mobile edge AI application** that benefits from large matrix operations, multi-modal fusion, or transformer-based processing enabled by SME2. Build and optimize a proof-of-concept application on a vivo X300 phone or other device supporting SME2.
+
+Example project areas:
+ - Real-time video semantic segmentation (e.g., background removal + AR compositing)
+ - Live object detection + natural-language description (text summary of what the camera sees)
+ - Multi-sensor fusion (camera + IMU + microphone) for gesture + voice recognition
+ - On-device lightweight LLM or encoder-only transformer processing for mobile assistants
+
+Identify a model architecture that maps to wide matrix operations (e.g., ViT, MLP-Mixer, multi-branch CNN with large FC layers). Utilise a mobile-friendly framework (e.g., ExecuTorch, LiteRT, ONNX Runtime, MediaPipe) to leverage SME2 optimizations. Optimize quantization, memory layout, and verify that the large matrix multiplications get scheduled efficiently on the SME2-enabled CPU. Build a mobile app (Android) that executes the model and utilises it for a compelling use-case.
+
+Utilise the resources and learning paths below and create an exciting and challenging application. Optionally, you could also compare performance vs a reference phone without SME2.
+
+---
+
+## Resources from Arm and our partners
+
+- Arm Developer: [Launchpad - Mobile AI](https://developer.arm.com/mobile-graphics-and-gaming/ai-mobile)
+- Learning Path: [Mobile AI/ML Performance Profiling](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/profiling-ml-on-arm/)
+- Learning Path: [Build an Android chat app with Llama, KleidiAI, ExecuTorch, and XNNPACK](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-llama3-chat-android-app-using-executorch-and-xnnpack/)
+- Learning Path: [Vision LLM Inference on Android with KleidiAI](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/vision-llm-inference-on-android-with-kleidiai-and-mnn/)
+- Learning Path: [Build a Hands-Free Selfie Android Application with MediaPipe](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/build-android-selfie-app-using-mediapipe-multimodality/)
+- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
+- Arm / Cambridge University edX course: [AI at the Edge on Arm (Mobile)](https://www.edx.org/learn/computer-science/arm-education-ai-at-the-edge-on-arm)
+
+---
+
+## Support Level
+
+This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally, if you are affiliated with an academic institution, please ensure you have the right to share your material.
+
---
\ No newline at end of file
diff --git a/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md b/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md
index 08fc13d1..f3e0b88d 100644
--- a/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md
+++ b/docs/_posts/2025-11-27-Ethos-U85-NPU-Applications.md
@@ -1,212 +1,214 @@
----
-title: 'Ethos-U85 NPU Applications with TOSA Model Explorer: Exploring Next-Gen Edge AI Inference'
-description: Push the limits of Edge AI by deploying the heaviest inference applications possible on Ethos-U85. Students will explore transformer-based and TOSA-optimized workloads that demonstrate performance levels on the next-gen of Ethos NPUs.
-subjects:
-- ML
-- Performance and Architecture
-requires-team:
-- No
-platform:
-- IoT
-- Embedded and Microcontrollers
-- AI
-sw-hw:
-- Software
-- Hardware
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-11-27
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
-
-
- ## Description
-
- **Why is this important?**
-
- The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices.
-
- This project challenges you to explore the boundaries of what’s possible on Ethos-U85. The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85.
-
- [Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85)
-
- **Project Summary**
-
- Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities.
-
- Your project should include:
-
- 1. Model Deployment and Optimization
- Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using:
- - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance.
- - The Vela compiler for optimization.
-
- These tools can be used to:
- - Convert and visualize model graphs in TOSA format.
- - Identify unsupported operators.
- - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting.
- - Run Vela for optimized compilation targeting Ethos-U85.
-
- 2. Application Demonstration
- Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include:
- - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification).
- - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks.
- - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding.
-
- 3. Analysis and Benchmarking
- Report quantitative results on:
- - Inference latency, throughput (FPS or tokens/s), and memory footprint.
- - Power efficiency under load (optional).
- - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits).
- - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion.
-
- ---
-
- ## What kind of projects should you target?
-
- To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria:
-
- - Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models.
- - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms.
- - High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput.
- - *Example:* 512×512 semantic segmentation or multi-object detection.
- - Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance.
- - *Example:* large MLP heads or transformer token mixers.
- - Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations.
-
- The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies.
-
- ---
-
- ## What will you use?
- You should be familiar with, or willing to learn about:
- - Programming: Python, C/C++
- - ExecuTorch or TensorFlow Lite (Micro/LiteRT)
- - Techniques for optimising AI models for the edge (quantization, pruning, etc.)
- - Optimization Tools:
- - TOSA Model Explorer
- - .tflite to .tosa converter (if using Tensorflow rather than ExecuTorch)
- - Vela compiler for Ethos-U
- - Bare-metal or RTOS (e.g., Zephyr)
-
- ---
-
- ## Resources from Arm and our partners
- - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai)
- - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/)
- - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
- - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/)
- - Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model)
- - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/)
- ---
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-## Description
-
-**Why is this important?**
-
-The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices.
-
-This project challenges you to explore the boundaries of what’s possible on Ethos-U85. The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85.
-
-[Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85)
-
-**Project Summary**
-
-Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities.
-
-Your project should include:
-
-1. Model Deployment and Optimization
- Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using:
- - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance.
- - The Vela compiler for optimization.
-
- These tools can be used to:
- - Convert and visualize model graphs in TOSA format.
- - Identify unsupported operators.
- - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting.
- - Run Vela for optimized compilation targeting Ethos-U85.
-
-2. Application Demonstration
- Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include:
- - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification).
- - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks.
- - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding.
-
-3. Analysis and Benchmarking
- Report quantitative results on:
- - Inference latency, throughput (FPS or tokens/s), and memory footprint.
- - Power efficiency under load (optional).
- - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits).
- - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion.
-
----
-
-## What kind of projects should you target?
-
-To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria:
-
-- Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models.
- - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms.
-- High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput.
- - *Example:* 512×512 semantic segmentation or multi-object detection.
-- Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance.
- - *Example:* large MLP heads or transformer token mixers.
-- Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations.
-
-The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies.
-
----
-
-## What will you use?
-You should be familiar with, or willing to learn about:
-- Programming: Python, C/C++
-- ExecuTorch or TensorFlow Lite (Micro/LiteRT)
-- Techniques for optimising AI models for the edge (quantization, pruning, etc.)
-- Optimization Tools:
- - TOSA Model Explorer
- - .tflite to .tosa converter (if using Tensorflow rather than ExecuTorch)
- - Vela compiler for Ethos-U
-- Bare-metal or RTOS (e.g., Zephyr)
-
----
-
-## Resources from Arm and our partners
-- Arm Developer: [Edge AI](https://developer.arm.com/edge-ai)
-- Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/)
-- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
-- Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/)
-- Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model)
-- PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/)
----
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
-
+---
+title: Ethos-U85-NPU-Applications
+description: Push the limits of Edge AI by deploying the heaviest inference applications possible on Ethos-U85. Students will explore transformer-based and TOSA-optimized workloads that demonstrate performance levels on the next-gen of Ethos NPUs.
+subjects:
+- ML
+- Performance and Architecture
+requires-team:
+- No
+platform:
+- IoT
+- Embedded and Microcontrollers
+- AI
+sw-hw:
+- Software
+- Hardware
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-11-27
+license:
+status:
+- Published
+badges: trending
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+ ## Description
+
+ **Why is this important?**
+
+ The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices.
+
+ This project challenges you to explore the boundaries of what’s possible on Ethos-U85. The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85.
+
+ [Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85)
+
+ **Project Summary**
+
+ Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities.
+
+ Your project should include:
+
+ 1. Model Deployment and Optimization
+ Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using:
+ - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance.
+ - The Vela compiler for optimization.
+
+ These tools can be used to:
+ - Convert and visualize model graphs in TOSA format.
+ - Identify unsupported operators.
+ - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting.
+ - Run Vela for optimized compilation targeting Ethos-U85.
+
+ 2. Application Demonstration
+ Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include:
+ - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification).
+ - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks.
+ - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding.
+
+ 3. Analysis and Benchmarking
+ Report quantitative results on:
+ - Inference latency, throughput (FPS or tokens/s), and memory footprint.
+ - Power efficiency under load (optional).
+ - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits).
+ - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion.
+
+ ---
+
+ ## What kind of projects should you target?
+
+ To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria:
+
+ - Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models.
+ - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms.
+ - High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput.
+ - *Example:* 512×512 semantic segmentation or multi-object detection.
+ - Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance.
+ - *Example:* large MLP heads or transformer token mixers.
+ - Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations.
+
+ The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies.
+
+ ---
+
+ ## What will you use?
+ You should be familiar with, or willing to learn about:
+ - Programming: Python, C/C++
+ - ExecuTorch or TensorFlow Lite (Micro/LiteRT)
+ - Techniques for optimising AI models for the edge (quantization, pruning, etc.)
+ - Optimization Tools:
+ - TOSA Model Explorer
+ - .tflite to .tosa converter (if using Tensorflow rather than ExecuTorch)
+ - Vela compiler for Ethos-U
+ - Bare-metal or RTOS (e.g., Zephyr)
+
+ ---
+
+ ## Resources from Arm and our partners
+ - Arm Developer: [Edge AI](https://developer.arm.com/edge-ai)
+ - Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/)
+ - Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
+ - Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/)
+ - Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model)
+ - PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/)
+ ---
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally, if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+## Description
+
+**Why is this important?**
+
+The Arm Ethos-U85 NPU represents a major leap in bringing *heavy inference* to constrained embedded systems. With its full transformer operator support, expanded MAC throughput, and native TOSA compatibility, the Ethos-U85 enables developers to deploy models and workloads that were previously too intensive for MCU-class devices.
+
+This project challenges you to explore the boundaries of what’s possible on Ethos-U85. The goal is to demonstrate inference performance and model complexity that is now achievable due to the architectural improvements and transformer acceleration capabilities of the Ethos-U85.
+
+[Ethos-U85 Launch](https://newsroom.arm.com/blog/ethos-u85)
+
+**Project Summary**
+
+Using hardware such as the Alif Ensemble E4/E6/E8 DevKits (all include Ethos-U85) or a comparable platform or Arm Fixed Virtual Platform Corstone-320, your task is to design and benchmark an advanced edge inference application that exploits the Ethos-U85’s compute and transformer capabilities.
+
+Your project should include:
+
+1. Model Deployment and Optimization
+ Select a computationally intensive model — ideally transformer-based or multi-branch convolutional — and deploy it on the Ethos-U85 using:
+ - The TOSA Model Explorer extension to inspect and adapt unsupported or experimental models for TOSA compliance.
+ - The Vela compiler for optimization.
+
+ These tools can be used to:
+ - Convert and visualize model graphs in TOSA format.
+ - Identify unsupported operators.
+ - Modify or substitute layers for compatibility using the Flatbuffers schema before re-exporting.
+ - Run Vela for optimized compilation targeting Ethos-U85.
+
+2. Application Demonstration
+ Implement a working example that highlights the Ethos-U85’s strengths in real-world inference. Possible categories include:
+ - Transformers on Edge: lightweight BERT, ViT, or audio transformers (e.g. speech or sound event classification).
+ - High-resolution Vision: semantic segmentation, object detection on large input sizes, or multi-head perception networks.
+ - Multi-modal Fusion: combining audio, image, or sensor streams for contextual understanding.
+
+3. Analysis and Benchmarking
+ Report quantitative results on:
+ - Inference latency, throughput (FPS or tokens/s), and memory footprint.
+ - Power efficiency under load (optional).
+ - Comparative performance versus Ethos-U55/U65 (use available benchmarks for reference or utilise the other Ethos-U NPUs provided in the Alif DevKits).
+ - The effect of TOSA optimization — demonstrate measurable improvements from graph conversion and operator fusion.
+
+---
+
+## What kind of projects should you target?
+
+To clearly demonstrate the leap from Ethos-U55/U65 to U85, choose projects that meet at least one of the following criteria:
+
+- Transformer-heavy architectures: e.g. attention blocks, transformer encoders, ViTs, or hybrid CNN+transformer models.
+ - *Example:* an audio event detection transformer that must process longer sequences or higher-resolution spectrograms.
+- High-resolution or multi-branch networks: models with high input dimensionality or multiple processing paths that saturate NPU throughput.
+ - *Example:* 512×512 semantic segmentation or multi-object detection.
+- Dense post-processing or large fully connected layers: cases where U55/U65 memory limits or MAC bandwidth previously restricted performance.
+ - *Example:* large MLP heads or transformer token mixers.
+- Multi-modal pipelines: combining multiple sensor inputs (e.g. image + IMU + audio) where the NPU must maintain concurrency or shared intermediate representations.
+
+The Ethos-U85 is ideal for projects where model performance is constrained by attention layers, large activations, or operator types that previously required fallback to the CPU. Use the Ethos-U85 to eliminate those fallbacks and achieve full-NPU execution of advanced topologies.
+
+---
+
+## What will you use?
+You should be familiar with, or willing to learn about:
+- Programming: Python, C/C++
+- ExecuTorch or TensorFlow Lite (Micro/LiteRT)
+- Techniques for optimising AI models for the edge (quantization, pruning, etc.)
+- Optimization Tools:
+ - TOSA Model Explorer
+ - .tflite to .tosa converter (if using Tensorflow rather than ExecuTorch)
+ - Vela compiler for Ethos-U
+- Bare-metal or RTOS (e.g., Zephyr)
+
+---
+
+## Resources from Arm and our partners
+- Arm Developer: [Edge AI](https://developer.arm.com/edge-ai)
+- Learning Path: [Navigating Machine Learning with Ethos-U processors](https://learn.arm.com/learning-paths/microcontrollers/nav-mlek/)
+- Repository: [AI on Arm course](https://github.com/arm-university/AI-on-Arm)
+- Example Board: [Alif Ensemble DevKit E8](https://www.keil.arm.com/boards/alif-semiconductor-devkit-e8-gen-1-2558a7b/features/)
+- Documentation: [TOSA Specification](https://www.mlplatform.org/tosa/), [TOSA Model Explorer](https://github.com/arm/tosa-adapter-model-explorer), and [TOSA Reference Model](https://gitlab.arm.com/tosa/tosa-reference-model)
+- PyTorch Blog: [ExecuTorch support for Ethos-U85](https://pytorch.org/blog/pt-executorch-ethos-u85/)
+---
+
+## Support Level
+
+This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or resumé building and demonstrate earned recognition.
+
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md b/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md
index 99c8e712..2693d42d 100644
--- a/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md
+++ b/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-&-Unreal-Engine.md
@@ -1,157 +1,159 @@
----
-title: Game development using Arm Neural Graphics with Unreal Engine
-description: Build a playable Unreal Engine 5 game demo that utilises Arm’s Neural Graphics SDK UE plugin for features such as Neural Super Sampling (NSS). Showcase near-identical image quality at lower resolution by driving neural rendering directly in the graphics pipeline.
-subjects:
-- ML
-- Gaming
-- Libraries
-- Graphics
-requires-team:
-- No
-platform:
-- Mobile, Graphics, and Gaming
-- Laptops and Desktops
-- AI
-sw-hw:
-- Software
-support-level:
-- Self-Service
-- Arm Ambassador Support
-publication-date: 2025-11-27
-license:
-status:
-- Published
-donation:
-layout: article
-sidebar:
- nav: projects
-full_description: |-
-
-
- ## Description
-
- ### Why is this important?
-
- Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation.
-
- Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face.
-
- [Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology)
-
- Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process.
-
- Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released.
-
- ### Project Summary
-
- Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate:
- - **Near-identical visuals at lower resolution** (render low → upscale with NSS)
-
- Document your progress and findings and consider alternative applications of the neural technology within games development.
-
- Attempt different environments and objects. For example:
-
- - Daytime vs night
- - Urban city, jungle forest, ocean floor, alien planet, building interiors
- - Complex lighting and shadows
- - NPCs with detailed clothing, faces, hair. Include animations.
-
- Make your scenes dynamic with particle effects, shadows, physics and motion.
-
- ---
-
- ## Pre-requisites
- - Laptop/PC/Mobile for Android Unreal Engine game development
- - Willingness to learn about games development and graphics, and the increasing use of AI in these fields.
-
- ---
-
- ## Resources from Arm and partners
- - Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling)
- - Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works)
- - Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics)
- - Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/)
- - Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/)
- - Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/)
- - Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine)
- - Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines)
- - Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym)
- - Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/)
-
- ---
-
- ## Support Level
-
- This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
- ## Benefits
-
- Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
-
- To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
----
-
-
-## Description
-
-### Why is this important?
-
-Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation.
-
-Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face.
-
-[Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology)
-
-Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process.
-
-Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released.
-
-### Project Summary
-
-Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate:
-- **Near-identical visuals at lower resolution** (render low → upscale with NSS)
-
-Document your progress and findings and consider alternative applications of the neural technology within games development.
-
-Attempt different environments and objects. For example:
-
-- Daytime vs night
-- Urban city, jungle forest, ocean floor, alien planet, building interiors
-- Complex lighting and shadows
-- NPCs with detailed clothing, faces, hair. Include animations.
-
-Make your scenes dynamic with particle effects, shadows, physics and motion.
-
----
-
-## Pre-requisites
-- Laptop/PC/Mobile for Android Unreal Engine game development
-- Willingness to learn about games development and graphics, and the increasing use of AI in these fields.
-
----
-
-## Resources from Arm and partners
-- Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling)
-- Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works)
-- Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics)
-- Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/)
-- Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/)
-- Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/)
-- Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine)
-- Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines)
-- Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym)
-- Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/)
-
----
-
-## Support Level
-
-This project is designed to be self-serve but comes with opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
-
-## Benefits
-
-Standout project contributions will result in digital badges for CV building, recognised by Arm Talent Acquisition. We are currently discussing with national agencies the potential for funding streams for Arm Developer Labs projects, which would flow to you, not us.
-
-
+---
+title: Game-Dev-Using-Neural-Graphics-&-Unreal-Engine
+description: Build a playable Unreal Engine 5 game demo that utilises Arm’s Neural Graphics SDK UE plugin for features such as Neural Super Sampling (NSS). Showcase near-identical image quality at lower resolution by driving neural rendering directly in the graphics pipeline.
+subjects:
+- ML
+- Gaming
+- Libraries
+- Graphics
+requires-team:
+- No
+platform:
+- Mobile, Graphics, and Gaming
+- Laptops and Desktops
+- AI
+sw-hw:
+- Software
+support-level:
+- Self-Service
+- Arm Ambassador Support
+publication-date: 2025-11-27
+license:
+status:
+- Published
+badges: trending
+donation:
+layout: article
+sidebar:
+ nav: projects
+full_description: |-
+
+
+ ## Description
+
+ ### Why is this important?
+
+ Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation.
+
+ Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face.
+
+ [Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology)
+
+ Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process.
+
+ Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released.
+
+ ### Project Summary
+
+ Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate:
+ - **Near-identical visuals at lower resolution** (render low → upscale with NSS)
+
+ Document your progress and findings and consider alternative applications of the neural technology within games development.
+
+ Attempt different environments and objects. For example:
+
+ - Daytime vs night
+ - Urban city, jungle forest, ocean floor, alien planet, building interiors
+ - Complex lighting and shadows
+ - NPCs with detailed clothing, faces, hair. Include animations.
+
+ Make your scenes dynamic with particle effects, shadows, physics and motion.
+
+ ---
+
+ ## Pre-requisites
+ - Laptop/PC/Mobile for Android Unreal Engine game development
+ - Willingness to learn about games development and graphics, and the increasing use of AI in these fields.
+
+ ---
+
+ ## Resources from Arm and partners
+ - Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling)
+ - Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works)
+ - Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics)
+ - Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/)
+ - Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/)
+ - Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/)
+ - Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine)
+ - Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines)
+ - Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym)
+ - Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/)
+
+ ---
+
+ ## Support Level
+
+ This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+ ## Benefits
+
+ Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+
+ To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally, if you are affiliated with an academic institution, please ensure you have the right to share your material.
+---
+
+
+
+## Description
+
+### Why is this important?
+
+Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation.
+
+Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face.
+
+[Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology)
+
+Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process.
+
+Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released.
+
+### Project Summary
+
+Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate:
+- **Near-identical visuals at lower resolution** (render low → upscale with NSS)
+
+Document your progress and findings and consider alternative applications of the neural technology within games development.
+
+Attempt different environments and objects. For example:
+
+- Daytime vs night
+- Urban city, jungle forest, ocean floor, alien planet, building interiors
+- Complex lighting and shadows
+- NPCs with detailed clothing, faces, hair. Include animations.
+
+Make your scenes dynamic with particle effects, shadows, physics and motion.
+
+---
+
+## Pre-requisites
+- Laptop/PC/Mobile for Android Unreal Engine game development
+- Willingness to learn about games development and graphics, and the increasing use of AI in these fields.
+
+---
+
+## Resources from Arm and partners
+- Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling)
+- Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works)
+- Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics)
+- Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/)
+- Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/)
+- Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/)
+- Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine)
+- Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines)
+- Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym)
+- Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/)
+
+---
+
+## Support Level
+
+This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+
To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md b/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md
new file mode 100644
index 00000000..8a3ddc2d
--- /dev/null
+++ b/docs/_posts/2025-11-27-Game-Dev-Using-Neural-Graphics-Unreal-Engine.md
@@ -0,0 +1,64 @@
+
+
+## Description
+
+### Why is this important?
+
+Arm neural technology is an industry first, adding dedicated neural accelerators to Arm GPUs, bringing PC-quality, AI powered graphics to mobile for the first time – and laying the foundation for future on-device AI innovation.
+
+Developers can start building now with the industry’s first open development kit for neural graphics with an Unreal Engine plugin, emulators, and open models on GitHub and Hugging Face.
+
+[Arm Neural Technology Announcement](https://newsroom.arm.com/news/arm-announces-arm-neural-technology)
+
+Neural Super Sampling (NSS) is Arm’s mobile-optimized AI-driven graphics upscaler that improves image quality while lowering resolution. It builds on a prior Arm solution: Accuracy Super Resolution (ASR). It is supported by an Unreal Engine plugin, streamlining its use as part of a typical industry games development process.
+
+Future SDK support will be provided for Neural Frame Rate Upscaling (NFRU) - so feel free to extend this project using NFRU when released.
+
+### Project Summary
+
+Create a small game scene utilising the Arm Neural Graphics UE plugin to demonstrate:
+- **Near-identical visuals at lower resolution** (render low → upscale with NSS)
+
+Document your progress and findings and consider alternative applications of the neural technology within games development.
+
+Attempt different environments and objects. For example:
+
+- Daytime vs night
+- Urban city, jungle forest, ocean floor, alien planet, building interiors
+- Complex lighting and shadows
+- NPCs with detailed clothing, faces, hair. Include animations.
+
+Make your scenes dynamic with particle effects, shadows, physics and motion.
+
+---
+
+## Pre-requisites
+- Laptop/PC/Mobile for Android Unreal Engine game development
+- Willingness to learn about games development and graphics, and the increasing use of AI in these fields.
+
+---
+
+## Resources from Arm and partners
+- Get Started Blog: [Start experimenting with NSS today](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-to-access-arm-neural-super-sampling)
+- Deep Dive Blog: [How NSS works](https://developer.arm.com/community/arm-community-blogs/b/mobile-graphics-and-gaming-blog/posts/how-arm-neural-super-sampling-works)
+- Arm Developer: [Neural Graphics Development Kit](https://developer.arm.com/mobile-graphics-and-gaming/neural-graphics)
+- Learning Path: [Fine-tuning neural graphics models with Model Gym](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/model-training-gym/)
+- Learning Path: [Neural Super Sampling in Unreal Engine](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/nss-unreal/)
+- Learning Path: [Getting started with Arm Accuracy Super Resolution (Arm ASR)](https://learn.arm.com/learning-paths/mobile-graphics-and-gaming/get-started-with-arm-asr/)
+- Unreal Engine Intro by Epic Games: [Understanding the basics](https://dev.epicgames.com/documentation/en-us/unreal-engine/understanding-the-basics-of-unreal-engine)
+- Repo: [Arm Neural Graphics SDK](https://github.com/arm/neural-graphics-sdk-for-game-engines)
+- Repo: [Arm Neural Graphics Model Gym](https://github.com/arm/neural-graphics-model-gym)
+- Documentation: [Arm Neural Graphics SDK for Game Engines Developer guide](https://developer.arm.com/documentation/111167/latest/)
+
+---
+
+## Support Level
+
+This project is designed to be self-serve but comes with the opportunity of some community support from Arm Ambassadors, who are part of the Arm Developer program. If you are not already part of our program, [click here to join](https://www.arm.com/resources/developer-program?#register).
+
+## Benefits
+
+Standout project contributions to the community will earn digital badges. These badges can support CV or résumé building and demonstrate earned recognition.
+
+
+To receive the benefits, you must show us your project through our [online form](https://forms.office.com/e/VZnJQLeRhD). Please do not include any confidential information in your contribution. Additionally, if you are affiliated with an academic institution, please ensure you have the right to share your material.
\ No newline at end of file
diff --git a/docs/assets/DevLabs_Logo.png b/docs/assets/DevLabs_Logo.png
new file mode 100644
index 00000000..2261533d
Binary files /dev/null and b/docs/assets/DevLabs_Logo.png differ
diff --git a/docs/assets/badges/RA.svg b/docs/assets/badges/RA.svg
new file mode 100644
index 00000000..84d8e397
--- /dev/null
+++ b/docs/assets/badges/RA.svg
@@ -0,0 +1,4 @@
+
\ No newline at end of file
diff --git a/docs/assets/badges/Trending.svg b/docs/assets/badges/Trending.svg
new file mode 100644
index 00000000..0b45f07d
--- /dev/null
+++ b/docs/assets/badges/Trending.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/docs/assets/badges/new.svg b/docs/assets/badges/new.svg
new file mode 100644
index 00000000..b7cc5e54
--- /dev/null
+++ b/docs/assets/badges/new.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/docs/assets/css/badge.css b/docs/assets/css/badge.css
new file mode 100644
index 00000000..f2046f66
--- /dev/null
+++ b/docs/assets/css/badge.css
@@ -0,0 +1,7 @@
+/* inline layout for titles + badges */
+.project-header{display:flex;align-items:center;justify-content:space-between;gap:.5rem}
+.project-header .badges{flex-shrink:0}
+
+/* generic badge styling (used everywhere) */
+.badges{display:inline-flex;gap:.25rem;align-items:center}
+.badge{height:20px;width:auto}
diff --git a/scripts/update_docs.py b/scripts/update_docs.py
index 5ac4822b..fc05e9e5 100644
--- a/scripts/update_docs.py
+++ b/scripts/update_docs.py
@@ -1,23 +1,33 @@
import os
-import sys
import re
import shutil
from pathlib import Path
-import frontmatter
from datetime import datetime
-import ruamel.yaml
-
-projects_dir = "../Projects/Projects"
-extended_projects_dir = "../Projects/Extended-Team-Projects"
-
-projects_pathlist = [Path("../Projects/projects.md")]
-projects_projects_pathlist = Path(projects_dir).rglob("*.md")
-projects_extended_project_pathlist = Path(extended_projects_dir).rglob("*.md")
-research_pathlist = [Path("../Research/research.md")]
-
-docs_posts_dir = "../docs/_posts"
+from io import StringIO
-index_frontmatter = """---
+import frontmatter
+import ruamel.yaml
+from ruamel.yaml.scalarstring import LiteralScalarString
+
+# ----------------------------
+# Paths (relative to this script)
+# ----------------------------
+SCRIPT_DIR = Path(__file__).resolve().parent
+REPO_ROOT = SCRIPT_DIR.parent # adjust if your script is deeper nested
+DOCS_DIR = REPO_ROOT / "docs"
+DOCS_POSTS_DIR = DOCS_DIR / "_posts"
+DOCS_IMAGES_DIR = DOCS_DIR / "images"
+DOCS_CONFIG = DOCS_DIR / "_config.yml"
+
+PROJECTS_DIR = REPO_ROOT / "Projects" / "Projects"
+EXT_PROJECTS_DIR = REPO_ROOT / "Projects" / "Extended-Team-Projects"
+
+PROJECTS_PATHLIST = [REPO_ROOT / "Projects" / "projects.md"]
+PROJECTS_PROJECTS_PATHLIST = PROJECTS_DIR.rglob("*.md")
+PROJECTS_EXTENDED_PATHLIST = EXT_PROJECTS_DIR.rglob("*.md")
+RESEARCH_PATHLIST = [REPO_ROOT / "Research" / "research.md"] # currently unused but kept for future
+
+INDEX_FRONTMATTER = """---
title: Academic Projects Repository
tags: TeXt
article_header:
@@ -27,46 +37,136 @@
---
"""
+# ----------------------------
+# Config helpers
+# ----------------------------
+def load_baseurl(default="/Arm-Developer-labs") -> str:
+    """
+    Reads baseurl from docs/_config.yml.
+    - Falls back to provided default if file or key missing.
+    - Returns a Jekyll-style baseurl: starts with '/', no trailing '/'.
+    """
+    if not DOCS_CONFIG.exists():
+        return default
+
+    yaml = ruamel.yaml.YAML()
+    # Best-effort parse: any read/parse failure falls back to the default
+    # rather than aborting docs generation.
+    try:
+        cfg = yaml.load(DOCS_CONFIG.read_text(encoding="utf-8")) or {}
+    except Exception:
+        return default
+
+    # NOTE(review): an explicitly empty baseurl in _config.yml (a valid value
+    # meaning "site at domain root") also falls back to the default here
+    # because of the `or` — confirm that is intended.
+    baseurl = cfg.get("baseurl", "") or default
+    if not isinstance(baseurl, str):
+        baseurl = str(baseurl)
+
+    if not baseurl.startswith("/"):
+        baseurl = "/" + baseurl
+    # NOTE(review): a root site normalises to "/" here, while Jekyll's own
+    # convention for root is baseurl: "" — verify callers don't produce
+    # double slashes when concatenating paths.
+    baseurl = baseurl.rstrip("/") or "/"
+    return baseurl
+
+# Resolved once at import time from docs/_config.yml (falls back to the repo default).
+BASEURL = load_baseurl()
+
+# ----------------------------
+# Utilities
+# ----------------------------
def clean():
-    clean_lst = [docs_posts_dir]
-    for dirpath in clean_lst:
-        if os.path.exists(dirpath) and os.path.isdir(dirpath):
-            shutil.rmtree(dirpath)
-            os.makedirs(dirpath)
-        else:
-            os.makedirs(dirpath)
+    """
+    Clears and recreates the docs/_posts directory.
+    """
+    # Destructive reset: all previously generated posts are removed so that
+    # entries for deleted source projects do not linger in the site output.
+    if DOCS_POSTS_DIR.exists():
+        shutil.rmtree(DOCS_POSTS_DIR)
+    DOCS_POSTS_DIR.mkdir(parents=True, exist_ok=True)
+
+def slugify(filename: str) -> str:
+    """
+    Build a URL-safe slug from the filename (without extension).
+    Lowercase, replace non [a-z0-9-] with '-'.
+    """
+    stem = Path(filename).stem
+    # Collapse each run of disallowed characters into a single '-', then trim
+    # leading/trailing dashes.
+    slug = re.sub(r"[^a-z0-9\-]+", "-", stem.lower()).strip("-")
+    # Guard against an all-symbol stem producing an empty slug.
+    return slug or "post"
+
+
+def normalize_date(meta_value, fallback_timestamp: float) -> str:
+    """
+    Accepts:
+    - a datetime
+    - a string 'YYYY-MM-DD' or ISO format
+    - None
+    Returns: 'YYYY-MM-DD' string, or mtime-based fallback.
+    """
+    if meta_value is None:
+        # No date in front matter: derive one from the file's mtime (local time).
+        return datetime.fromtimestamp(fallback_timestamp).strftime("%Y-%m-%d")
+
+    if isinstance(meta_value, datetime):
+        return meta_value.strftime("%Y-%m-%d")
+
+    # NOTE(review): YAML front-matter dates commonly load as datetime.date
+    # (not datetime), so they reach this str() path; date.isoformat() strings
+    # parse cleanly below — confirm against the front-matter loader used.
+    s = str(meta_value)
+    try:
+        return datetime.fromisoformat(s).strftime("%Y-%m-%d")
+    except ValueError:
+        # Fallback: first 10 chars if something odd comes through
+        return s[:10]
+
+
+# ----------------------------
+# Content transforms
+# ----------------------------
def convert_md_images_to_html(md_text: str, doc_path: Path) -> str:
+ """
+ - Finds Markdown images 
+ - Only rewrites *relative* image paths (no http(s)://, no leading /)
+ - Copies each such image into docs/images/
+ - Rewrites to