Silence existing errors in deeplearning (pytorch#2787)
Summary:
Pull Request resolved: pytorch#2787

No functional change; this silences the existing Pyre errors before the directory is flipped to type-checked by default.

Reviewed By: grievejia

Differential Revision: D59076824

fbshipit-source-id: f476076e3a5a168485d22bf19c93c4480b219953
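
For context, "silencing" here means adding `# pyre-fixme[...]` comments: each one tells Pyre to ignore a single error code on the line that immediately follows it. A minimal sketch of the mechanism, using a hypothetical class rather than code from this commit:

```python
from typing import List


class PrefetchState:
    """Hypothetical class for illustration; not code from training.py."""

    def __init__(self) -> None:
        # Pyre reports error [4] ("Attribute must be annotated") for an
        # unannotated attribute; the fixme comment on the preceding line
        # suppresses exactly that error code for the line below it.
        # pyre-fixme[4]: Attribute must be annotated.
        self.ssd_prefetch_data = []

        # The eventual fix is an explicit annotation, at which point the
        # suppression comment can be removed:
        self.timesteps_prefetched: List[int] = []
```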
Maggie Moss authored and facebook-github-bot committed Jun 27, 2024
1 parent 7adfaa8 commit 24140d5
Showing 1 changed file with 6 additions and 0 deletions.
6 changes: 6 additions & 0 deletions fbgemm_gpu/fbgemm_gpu/tbe/ssd/training.py
@@ -275,6 +275,7 @@ def __init__(
self.timesteps_prefetched: List[int] = []
self.ssd_scratch_pads: List[Tuple[Tensor, Tensor, Tensor]] = []
# TODO: add type annotation
# pyre-fixme[4]: Attribute must be annotated.
self.ssd_prefetch_data = []

if weight_decay_mode == WeightDecayMode.COUNTER or counter_based_regularization:
@@ -328,6 +329,7 @@ def __init__(
offsets=[0] * (len(rows)),
),
"weights",
# pyre-fixme[6]: For 3rd argument expected `Type[dtype]` but got `dtype`.
dtype=table_embedding_dtype,
)

@@ -341,6 +343,7 @@ def __init__(
offsets=momentum1_offsets[:-1],
),
"momentum1",
# pyre-fixme[6]: For 3rd argument expected `Type[dtype]` but got `dtype`.
dtype=torch.float32,
)

@@ -454,6 +457,8 @@ def _compute_cache_ptrs(
(inserted_rows_gpu, post_bwd_evicted_indices, actions_count_cpu)
)

# pyre-fixme[7]: Expected `Tensor` but got `Tuple[typing.Any, Tensor,
# typing.Any, Tensor]`.
return (
lxu_cache_ptrs,
inserted_rows_gpu,
@@ -621,6 +626,7 @@ def forward(
)

if self.optimizer == OptimType.EXACT_ROWWISE_ADAGRAD:
# pyre-fixme[7]: Expected `Tensor` but got implicit return value of `None`.
return invokers.lookup_rowwise_adagrad_ssd.invoke(
common_args, self.optimizer_args, momentum1
)
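
The two `pyre-fixme[6]` suppressions above stem from passing a `torch.dtype` instance (such as `torch.float32`) to a parameter annotated as `Type[dtype]`, i.e. the class rather than an instance. A minimal sketch of how that mismatch gets reported and suppressed, using a hypothetical helper whose signature is assumed rather than taken from training.py:

```python
from typing import Type

import torch


def register_buffer(name: str, dtype: Type[torch.dtype]) -> None:
    """Hypothetical helper whose `dtype` parameter is annotated with the
    class (`Type[torch.dtype]`) instead of an instance (`torch.dtype`)."""
    print(f"registering {name} with dtype {dtype}")


# `torch.float32` is an *instance* of `torch.dtype`, not the class itself,
# so Pyre flags the call with error [6] (incompatible parameter type).
# Like the commit above, this suppresses the call site instead of
# changing the annotation:
# pyre-fixme[6]: For 2nd argument expected `Type[dtype]` but got `dtype`.
register_buffer("momentum1", dtype=torch.float32)
```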
