Commit dda5ad0

Author: Andrew Gu
Commit message: Removed _experimental_support_context_fn_in_torch_utils_checkpoint
[ghstack-poisoned]
1 parent: 3fca883

1 file changed (+0, -7 lines)

torchtitan/parallelisms/parallelize_llama.py

Lines changed: 0 additions & 7 deletions
@@ -441,13 +441,6 @@ def apply_compile(model, job_config: JobConfig):
         transformer_block = torch.compile(transformer_block, dynamic=False)
         model.layers.register_module(layer_id, transformer_block)

-    ac_config = job_config.activation_checkpoint
-    if ac_config.mode == "selective" and ac_config.selective_ac_option == "op":
-        # some temp flags for torch.compile enablement + SAC
-        torch._dynamo.config._experimental_support_context_fn_in_torch_utils_checkpoint = (
-            True
-        )
-
     logger.info("Compiled each TransformerBlock with torch.compile")
     return model
