Skip to content

Commit daa98ae

Browse files
authored
Merge pull request #50 from purdue-aalp/huggingface_app
add huggingface sample app
2 parents 83580d5 + e56df7d commit daa98ae

File tree

7 files changed

+124
-3
lines changed

7 files changed

+124
-3
lines changed

.gitignore

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,4 +13,6 @@ src/cuda/rodinia/3.1/cuda/nn/nn
1313
src/cuda/rodinia/3.1/cuda/particlefilter/particlefilter_float
1414
src/cuda/rodinia/3.1/cuda/particlefilter/particlefilter_naive
1515
src/cuda/rodinia/3.1/cuda/pathfinder/pathfinder
16-
4.2
16+
4.2
17+
.venv/
18+
__pycache__/

src/Makefile

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ $(error You must run "source setup_environment before calling make")
44
endif
55

66
ifeq ($(CUDA_GT_7), 1)
7-
all: GPU_Microbenchmark microbench rodinia_2.0-ft cutlass rodinia-3.1 pannotia proxy-apps ispass-2009 lonestargpu-2.0 polybench custom_apps heterosync cuda_samples mlperf_inference vllm # mlperf_training
7+
all: GPU_Microbenchmark microbench rodinia_2.0-ft cutlass rodinia-3.1 pannotia proxy-apps ispass-2009 lonestargpu-2.0 polybench custom_apps heterosync cuda_samples mlperf_inference vllm huggingface # mlperf_training
88
else
99
ifeq ($(CUDA_GT_4), 1)
1010
all: pannotia rodinia_2.0-ft proxy-apps dragon-naive microbench rodinia-3.1 ispass-2009 dragon-cdp lonestargpu-2.0 polybench parboil shoc custom_apps
@@ -19,7 +19,7 @@ accelwattch_hw_power: rodinia-3.1_hw_power parboil_hw_power cuda_samples-11.0_hw
1919
#Disable clean for now, It has a bug!
2020
# clean_dragon-naive clean_pannotia clean_proxy-apps
2121

22-
clean: clean_mlperf_inference clean_rodinia_2.0-ft clean_dragon-cdp clean_ispass-2009 clean_lonestargpu-2.0 clean_custom_apps clean_parboil clean_cutlass clean_rodinia-3.1 clean_heterosync clean_UVMSmart_test clean_cuda_samples
22+
clean: clean_mlperf_inference clean_rodinia_2.0-ft clean_dragon-cdp clean_ispass-2009 clean_lonestargpu-2.0 clean_custom_apps clean_parboil clean_cutlass clean_rodinia-3.1 clean_heterosync clean_UVMSmart_test clean_cuda_samples clean_huggingface
2323
clean_accelwattch: clean_rodinia-3.1 clean_parboil clean_cutlass clean_cuda_samples-11.0 clean_cuda_samples_hw_power clean_rodinia-3.1_hw_power clean_parboil_hw_power clean_accelwattch_ubench
2424

2525
clean_data:
@@ -511,6 +511,14 @@ pytorch_examples:
511511
echo "#!/bin/bash\npython $(BINDIR)/$(BINSUBDIR)/pytorch_examples/vae/main.py --epochs=1" > $(BINDIR)/$(BINSUBDIR)/inference_vae
512512
chmod u+x $(BINDIR)/$(BINSUBDIR)/inference_vae
513513

514+
# Stage the Hugging Face sample app into the benchmark bin tree, build its
# Python virtualenv, and generate a self-contained "helloworld" launcher
# that re-activates that venv before running the example.
# NOTE(review): assumes BINDIR/BINSUBDIR are set by the sourced setup_environment — confirm.
.PHONY: huggingface
huggingface:
	mkdir -p $(BINDIR)/$(BINSUBDIR)/huggingface
	cp -r cuda/huggingface $(BINDIR)/$(BINSUBDIR)
	bash $(BINDIR)/$(BINSUBDIR)/huggingface/setup_environment.sh
	echo "source $(BINDIR)/$(BINSUBDIR)/huggingface/setup_environment.sh && python3 $(BINDIR)/$(BINSUBDIR)/huggingface/helloworld.py" > $(BINDIR)/$(BINSUBDIR)/huggingface/helloworld
	chmod u+x $(BINDIR)/$(BINSUBDIR)/huggingface/helloworld
	chmod u+x $(BINDIR)/$(BINSUBDIR)/huggingface/*.py
514522
clean_heterosync:
515523
rm -rf cuda/heterosync
516524

@@ -685,3 +693,6 @@ clean_pytorch_examples:
685693

686694
clean_cuda_samples:
687695
make clean -C ./cuda/cuda-samples/build
696+
697+
# Remove the staged Hugging Face app, including its .venv.
# Deletes only the app's own subdirectory, never the whole bin tree.
.PHONY: clean_huggingface
clean_huggingface:
	rm -rf $(BINDIR)/$(BINSUBDIR)/huggingface

src/cuda/huggingface/README.md

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
# Huggingface example
2+
3+
Run Hugging Face models with a minimal text-generation example.
4+
5+
## Loading gated model
6+
7+
Make sure to create a `.env` file in the folder with the following content:
8+
9+
```bash
10+
HF_TOKEN=hf_...
11+
```
12+
13+
## Setup environment
14+
15+
```
16+
./setup_environment.sh
17+
```
18+
19+
## Run example
20+
21+
```bash
22+
./helloworld.py --model_name "openai-community/gpt2-large" --prompt "Hello World! In a galaxy far, far away..."
23+
```

src/cuda/huggingface/helloworld.py

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
# Hello world for huggingface
2+
import argparse
3+
import transformers
4+
from dotenv import load_dotenv
5+
6+
from util import getCommonParser
7+
8+
def main():
    """Run a Hugging Face text-generation pipeline on a command-line prompt."""
    # Load HF_TOKEN (for gated models) from a local .env file, if present.
    load_dotenv()

    parser = argparse.ArgumentParser(parents=[getCommonParser()])
    parser.add_argument("--prompt", type=str, default="Hello World! In a galaxy far, far away...")
    parser.add_argument("--max_length", type=int, default=256)
    args = parser.parse_args()

    banner = f"Running Hello world for \"{args.model_name}\" with prompt: \"{args.prompt}\""
    print(banner)
    print("=" * len(banner))

    # Build the pipeline once, then print every generated continuation.
    generator = transformers.pipeline(task="text-generation", model=args.model_name, device=args.device)
    for output in generator(args.prompt, max_length=args.max_length, truncation=True):
        print(output["generated_text"])


if __name__ == "__main__":
    main()
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
# Huggingface
2+
transformers
3+
datasets
4+
accelerate
5+
6+
# Load environment variables from .env file
7+
python-dotenv
Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
#!/bin/bash
# Setup environment for running huggingface examples.
# Intended to be *sourced* (so the venv activation persists in the caller's
# shell), but also safe to execute directly (the Makefile does this once to
# create the venv and install dependencies).

# Check if we are already in a virtual environment
if [ -n "$VIRTUAL_ENV" ]; then
    echo "Already in a virtual environment, skipping setup"
    # `exit` would terminate the caller's shell when this file is sourced;
    # prefer `return`, falling back to `exit` when executed as a script.
    return 0 2>/dev/null || exit 0
fi

# Get the location of this script when sourcing
if test -n "$BASH" ; then SCRIPT_LOC=$BASH_SOURCE
elif test -n "$ZSH_NAME" ; then SCRIPT_LOC=${(%):-%x}
else
    echo "WARNING this script only tested with bash and zsh, use with caution with your shell at $SHELL"
    if test -n "$TMOUT"; then SCRIPT_LOC=${.sh.file}
    elif test ${0##*/} = dash; then x=$(lsof -p $$ -Fn0 | tail -1); SCRIPT_LOC=${x#n}
    elif test -n "$FISH_VERSION" ; then SCRIPT_LOC=(status current-filename)
    else echo "ERROR unknown shell, cannot determine script location" && return 1
    fi
fi

# Get the directory of the script
SCRIPT_DIR=${SCRIPT_LOC%/*}

# Check if virtual environment exists
if [ ! -d "$SCRIPT_DIR/.venv" ]; then
    # First run: create the virtual environment...
    python3 -m venv "$SCRIPT_DIR/.venv"

    # ...activate it...
    source "$SCRIPT_DIR/.venv/bin/activate"

    # ...and install the example's dependencies into it.
    pip install -r "$SCRIPT_DIR/requirements.txt"
else
    # Venv already exists: just activate it.
    source "$SCRIPT_DIR/.venv/bin/activate"
fi

# Permission for python scripts
chmod u+x "$SCRIPT_DIR"/*.py

echo "Environment setup complete"

src/cuda/huggingface/util.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
# Common flags for running huggingface examples
2+
import argparse
3+
4+
def getCommonParser(is_parent=True):
    """Build the argparse parser with flags shared by all huggingface examples.

    When the result is passed via ``parents=[...]`` (is_parent=True), the
    ``-h/--help`` flag must be suppressed here so the child parser can add
    its own without a conflict.
    """
    common = argparse.ArgumentParser(add_help=not is_parent)
    common.add_argument("--model_name", type=str, default="openai-community/gpt2-large")
    common.add_argument("--device", type=str, default="cuda")
    return common

0 commit comments

Comments
 (0)