
#Copyright (c) 2024 Oracle and/or its affiliates.
#
#The Universal Permissive License (UPL), Version 1.0
#
#Subject to the condition set forth below, permission is hereby granted to any
#person obtaining a copy of this software, associated documentation and/or data
#(collectively the "Software"), free of charge and under any and all copyright
#rights in the Software, and any and all patent rights owned or freely
#licensable by each licensor hereunder covering either (i) the unmodified
#Software as contributed to or provided by such licensor, or (ii) the Larger
#Works (as defined below), to deal in both
#
#(a) the Software, and
#(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
#one is included with the Software (each a "Larger Work" to which the Software
#is contributed by such licensors),
#
#without restriction, including without limitation the rights to copy, create
#derivative works of, display, perform, and distribute the Software and make,
#use, sell, offer for sale, import, export, have made, and have sold the
#Software and the Larger Work(s), and to sublicense the foregoing rights on
#either these or other terms.
#
#This license is subject to the following condition:
#The above copyright notice and either this complete permission notice or at
#a minimum a reference to the UPL must be included in all copies or
#substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
38+
import transformers
import torch

# Hugging Face model to download, the access token that authorizes the
# gated download, and the local directory the weights are cached/saved to.
model_id = "meta-llama/Meta-Llama-3-8B-Instruct"
access_token = "<INSERT TOKEN>"  # Replace with your actual Hugging Face access token
output_dir = "./"  # Specify your desired directory


def main():
    """Download Llama 3 8B Instruct, save it locally, and run a demo generation."""
    # Download the model weights in bfloat16 to halve memory versus float32.
    # NOTE: `use_auth_token` is deprecated in transformers (removed in v5);
    # the replacement keyword is `token`.
    model = transformers.AutoModelForCausalLM.from_pretrained(
        model_id,
        cache_dir=output_dir,
        token=access_token,  # Directly pass the token here
        torch_dtype=torch.bfloat16,
    )

    tokenizer = transformers.AutoTokenizer.from_pretrained(
        model_id,
        cache_dir=output_dir,
        token=access_token,  # Directly pass the token here
    )

    # Save the model and tokenizer to the specified directory
    model.save_pretrained(output_dir)
    tokenizer.save_pretrained(output_dir)

    # Example usage — fall back to CPU when no CUDA device is available so the
    # script still runs on GPU-less machines instead of raising at placement.
    # (Renamed from `pipeline` to avoid shadowing transformers.pipeline.)
    text_generator = transformers.pipeline(
        "text-generation",
        model=model,
        tokenizer=tokenizer,
        device="cuda" if torch.cuda.is_available() else "cpu",
    )

    print(text_generator("Once upon a time")[0]["generated_text"])


if __name__ == "__main__":
    main()