Fix pyre-check CI (#2476)
Summary:
Pull Request resolved: #2476

Pyre-check is broken on trunk.
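
The diffs below suggest the failure came from passing values typed Optional[torch.device] into device_ids without a check Pyre can use for narrowing. A minimal sketch of the pattern the fix applies, assuming the usual Pyre/typing narrowing semantics (the cuda_device_ids helper is hypothetical, not from this change):

from typing import List, Optional

import torch


def cuda_device_ids(device: Optional[torch.device]) -> Optional[List[torch.device]]:
    # Hypothetical illustration: the explicit `is not None` comparison lets
    # a type checker narrow `device` from Optional[torch.device] to
    # torch.device, so the list literal below is typed List[torch.device]
    # rather than List[Optional[torch.device]].
    if device is not None and device.type == "cuda":
        return [device]
    return None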

Reviewed By: TroyGarden

Differential Revision: D64075053

fbshipit-source-id: 7e43c35c49ee7fbc7497f36a5787bd8310825a78
PaulZhang12 authored and facebook-github-bot committed Oct 9, 2024
1 parent a41e1a4 commit 5b803f8
Showing 4 changed files with 8 additions and 7 deletions.
4 changes: 2 additions & 2 deletions torchrec/distributed/embedding.py
@@ -595,8 +595,8 @@ def __init__(
             self._lookups[index] = DistributedDataParallel(
                 module=lookup,
                 device_ids=(
-                    [device]
-                    if self._device and self._device.type == "cuda"
+                    [self._device]
+                    if self._device is not None and self._device.type == "cuda"
                     else None
                 ),
                 process_group=env.process_group,
4 changes: 2 additions & 2 deletions torchrec/distributed/embedding_tower_sharding.py
@@ -168,7 +168,7 @@ def __init__(
         # Hierarchical DDP
         self.interaction = DistributedDataParallel(
             module=module.interaction.to(self._device),
-            device_ids=[self._device],
+            device_ids=[self._device] if self._device is not None else None,
             process_group=self._intra_pg,
             gradient_as_bucket_view=True,
             broadcast_buffers=False,
@@ -589,7 +589,7 @@ def __init__(
         # Hierarchical DDP
         self.interactions[i] = DistributedDataParallel(
             module=tower.interaction.to(self._device),
-            device_ids=[self._device],
+            device_ids=[self._device] if self._device is not None else None,
             process_group=self._intra_pg,
             gradient_as_bucket_view=True,
             broadcast_buffers=False,
5 changes: 3 additions & 2 deletions torchrec/distributed/embeddingbag.py
@@ -695,8 +695,9 @@ def __init__(
             self._lookups[i] = DistributedDataParallel(
                 module=lookup,
                 device_ids=(
-                    [device]
-                    if self._device and (self._device.type in {"cuda", "mtia"})
+                    [self._device]
+                    if self._device is not None
+                    and (self._device.type in {"cuda", "mtia"})
                     else None
                 ),
                 process_group=env.process_group,
2 changes: 1 addition & 1 deletion torchrec/distributed/fused_embeddingbag.py
@@ -70,7 +70,7 @@ def __init__(
         if isinstance(sharding, DpPooledEmbeddingSharding):
             self._lookups[index] = DistributedDataParallel(
                 module=lookup,
-                device_ids=[device],
+                device_ids=[device] if device is not None else None,
                 process_group=env.process_group,
                 gradient_as_bucket_view=True,
                 broadcast_buffers=False,
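
All four files converge on the same guard at each DistributedDataParallel construction site. A condensed sketch of the resulting pattern, with an illustrative wrap_lookup helper that is not part of the diff (DDP accepts device_ids=None, which is required for CPU modules):

from typing import Optional

import torch
import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel


def wrap_lookup(
    lookup: nn.Module,
    device: Optional[torch.device],
    process_group: "torch.distributed.ProcessGroup",
) -> DistributedDataParallel:
    # Pass device_ids only when a concrete device is known; the explicit
    # `is not None` check also narrows the Optional for the type checker.
    return DistributedDataParallel(
        module=lookup,
        device_ids=[device] if device is not None else None,
        process_group=process_group,
        gradient_as_bucket_view=True,
        broadcast_buffers=False,
    )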
