Skip to content

Commit

Permalink
fix imports
Browse files Browse the repository at this point in the history
  • Loading branch information
abhi-mosaic committed Sep 22, 2022
1 parent f4a70f6 commit 0a74b19
Showing 1 changed file with 3 additions and 1 deletion.
4 changes: 3 additions & 1 deletion composer/trainer/activation_checkpointing.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
# - https://github.com/pytorch/pytorch/blob/caa0ab557dd10e04ca413c1508f76ec8ae5adea3/torch/utils/checkpoint.py
# TODO: once torch 1.13 is released, import functions as needed directly from torch and delete this file.

import warnings
import weakref
from enum import Enum, auto
from functools import partial
from typing import Any, Callable, Dict, Iterable, Iterator, List, Set, Tuple, cast
Expand Down Expand Up @@ -772,7 +774,7 @@ def inner_unpack(packed):

with torch.autograd.graph.saved_tensors_hooks(pack, unpack):
output = function(*args, **kwargs)
if torch.cuda._initialized and preserve_rng_state and not had_cuda_in_fwd:
if torch.cuda._initialized and preserve_rng_state and not had_cuda_in_fwd: # type: ignore
# Cuda was not initialized before running the forward, so we didn't
# stash the CUDA state.
raise RuntimeError(
Expand Down

0 comments on commit 0a74b19

Please sign in to comment.