1 parent d0fb772 commit bdd2507
tests/integration_tests/test_policy_update.py
```diff
@@ -251,15 +251,14 @@ async def llama3_torchstore_setup():
 
     store = await MultiProcessStore.create_store()
 
-    model_path = "/tmp/Meta-Llama-3.1-8B-Instruct"
+    model_path = "meta-llama/Meta-Llama-3.1-8B-Instruct"
 
     # Load the model from local path - using device_map="auto" for efficient loading
     model = AutoModelForCausalLM.from_pretrained(
         model_path,
         torch_dtype=torch.float16,  # Use half precision to save memory
         device_map="auto",
         trust_remote_code=True,
-        local_files_only=True,  # Ensure we don't try to download
     )
 
     original_state_dict = model.state_dict()
```
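The change points the fixture at the Hugging Face Hub model ID instead of a pre-downloaded checkpoint under `/tmp`, so `local_files_only=True` is dropped and the weights are resolved (and cached) by `transformers` at test time. A minimal standalone sketch of the resulting loading path is below; it assumes `transformers`, `torch`, and `accelerate` (for `device_map="auto"`) are installed and that the gated Llama 3.1 weights are accessible to the test environment.

```python
# Sketch of the updated loading path used by the fixture (not the full test file).
import torch
from transformers import AutoModelForCausalLM

# Hub model ID; transformers downloads and caches the weights on first use.
model_path = "meta-llama/Meta-Llama-3.1-8B-Instruct"

model = AutoModelForCausalLM.from_pretrained(
    model_path,
    torch_dtype=torch.float16,  # half precision to save memory
    device_map="auto",          # let accelerate shard/place layers automatically
    trust_remote_code=True,
)

# Snapshot of the freshly loaded weights, compared against later in the test.
original_state_dict = model.state_dict()
```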