2 files changed: +4 −13 lines

First file:

@@ -535,9 +535,6 @@ def get_cache_key() -> Optional[str]:
         )
         return f"{r}:{rank}:{tag}"
 
-    if r := torch.compiler.config.sticky_pgo_key:
-        return f"sticky:{r}:{rank}:{tag}"
-
     if (name_version := torch._utils_internal.get_mast_job_name_version()) is not None:
         mast_job_name, mast_job_version = name_version
         return f"mast:{mast_job_name}:{mast_job_version}:{rank}:{tag}"
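For orientation, here is a minimal sketch of the key-selection order in get_cache_key after this hunk. It assumes the branch above the removed one reads torch.compiler.config.job_id (only the context lines are visible in the diff); with the dedicated "sticky:" namespace gone, a shared profile key now has to arrive through job_id.

from typing import Optional

import torch


def get_cache_key_sketch(rank: int, tag: str) -> Optional[str]:
    # Assumed earlier branch: a plain job_id produces the ordinary key.
    if r := torch.compiler.config.job_id:
        return f"{r}:{rank}:{tag}"
    # The dedicated sticky_pgo_key branch, which returned
    # f"sticky:{r}:{rank}:{tag}", is removed by this change.
    # MAST fallback, copied from the visible context lines.
    if (name_version := torch._utils_internal.get_mast_job_name_version()) is not None:
        mast_job_name, mast_job_version = name_version
        return f"mast:{mast_job_name}:{mast_job_version}:{rank}:{tag}"
    return None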
Second file:

@@ -29,7 +29,10 @@
 # FB-internal note: you do NOT have to specify this explicitly specify this if
 # you run on MAST, we will automatically default this to
 # mast:MAST_JOB_NAME:MAST_JOB_VERSION.
-job_id: Optional[str] = Config(env_name_default="TORCH_COMPILE_JOB_ID", default=None)
+job_id: Optional[str] = Config(
+    env_name_default=["TORCH_COMPILE_JOB_ID", "TORCH_COMPILE_STICKY_PGO_KEY"],
+    default=None,
+)
 """
 Semantically, this should be an identifier that uniquely identifies, e.g., a
 training job. You might have multiple attempts of the same job, e.g., if it was
@@ -74,15 +77,6 @@
 and force_parameter_static_shapes.
 """
 
-sticky_pgo_key: str = Config(
-    env_name_default="TORCH_COMPILE_STICKY_PGO_KEY", default=""
-)
-"""
-If you want to share PGO profiles across different jobs (and not just attempts), you can set
-this to a string that identifies the shared profile. This is useful if you want to share PGO profiles
-for models that are not identical, but are similar enough to share PGO profiles.
-"""
-
 unbacked_sources: str = Config(
     env_name_default="TORCH_COMPILE_UNBACKED_SOURCES", default=""
 )
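A usage sketch for the new config shape, assuming the Config helper reads the env_name_default entries in order, takes the first variable that is set, and picks the value up when torch.compiler.config is initialized (the list semantics are not shown in this diff; the key value below is made up):

import os

# Either variable now feeds job_id; the legacy sticky name is kept for compatibility.
os.environ["TORCH_COMPILE_STICKY_PGO_KEY"] = "shared-recsys-profile"  # hypothetical value

import torch

# Cross-job profile sharing, previously configured via sticky_pgo_key,
# is now expressed through job_id.
print(torch.compiler.config.job_id)  # expected: "shared-recsys-profile"

Folding the sticky key into job_id leaves one identifier driving both cache-key construction and profile sharing, instead of two overlapping knobs.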