From 36df7283dfc5ba32e5225342ed256854fac64199 Mon Sep 17 00:00:00 2001 From: Damian Reeves Date: Tue, 15 Oct 2024 22:36:35 -0700 Subject: [PATCH] - For planner to use cpu for search random generator (#2484) Summary: torch.rand() allocates its output on the current global default device. If torch.device has been globally set to 'meta', then this breaks the planner code. Force the device to cpu instead. This ensures we're using the same seeded random number algorithm as before (so no changes to existing plans). We only generate a handful of random numbers so performance is not a concern. Example breakage: https://fb.workplace.com/groups/260102303573409/posts/507200532196917/ Differential Revision: D64260019 --- torchrec/distributed/planner/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/torchrec/distributed/planner/utils.py b/torchrec/distributed/planner/utils.py index 7ad237553..5d0a03a27 100644 --- a/torchrec/distributed/planner/utils.py +++ b/torchrec/distributed/planner/utils.py @@ -216,7 +216,7 @@ def clamp(self, x: float) -> float: def uniform(self, A: float, B: float) -> float: "Return a random uniform position in range [A,B]." - u = torch.rand(1, generator=self.gen).item() + u = torch.rand(1, generator=self.gen, device="cpu").item() return A + (B - A) * u def next(self, fy: float) -> Optional[float]: