Skip to content

Commit

Permalink
Redefine FBGEMM targets with gpu_cpp_library [12/N]
Browse files Browse the repository at this point in the history
Summary: Redefine `merge_pooled_embeddings_*` targets using `gpu_cpp_library`

Differential Revision: D62978444
  • Loading branch information
q10 authored and facebook-github-bot committed Sep 19, 2024
1 parent ebbebd4 commit ec39ce8
Show file tree
Hide file tree
Showing 3 changed files with 11 additions and 34 deletions.
12 changes: 1 addition & 11 deletions fbgemm_gpu/bench/merge_embeddings_benchmark.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,17 +44,7 @@
else:
from fbgemm_gpu.bench.bench_utils import benchmark_torch_function

if torch.version.hip:
torch.ops.load_library(
"//deeplearning/fbgemm/fbgemm_gpu:merge_pooled_embeddings_hip"
)
else:
torch.ops.load_library(
"//deeplearning/fbgemm/fbgemm_gpu:merge_pooled_embeddings"
)
torch.ops.load_library(
"//deeplearning/fbgemm/fbgemm_gpu:merge_pooled_embeddings_cpu"
)
torch.ops.load_library("//deeplearning/fbgemm/fbgemm_gpu:merge_pooled_embeddings")


# pyre-fixme[2]: Parameter must be annotated.
Expand Down
9 changes: 3 additions & 6 deletions fbgemm_gpu/fbgemm_gpu/sparse_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,25 +13,22 @@

from fbgemm_gpu.split_embedding_configs import SparseType
from fbgemm_gpu.split_table_batched_embeddings_ops_common import PoolingMode
from fbgemm_gpu.utils.loader import load_torch_module

try:
# pyre-ignore
from fbgemm_gpu import open_source # noqa: F401
except Exception:
load_torch_module("//deeplearning/fbgemm/fbgemm_gpu:merge_pooled_embeddings")

if torch.version.hip:
torch.ops.load_library("//deeplearning/fbgemm/fbgemm_gpu:sparse_ops_hip")
torch.ops.load_library(
"//deeplearning/fbgemm/fbgemm_gpu:merge_pooled_embeddings_hip"
)
torch.ops.load_library(
"//deeplearning/fbgemm/fbgemm_gpu/codegen:embedding_ops_hip"
)

else:
torch.ops.load_library("//deeplearning/fbgemm/fbgemm_gpu:sparse_ops")
torch.ops.load_library(
"//deeplearning/fbgemm/fbgemm_gpu:merge_pooled_embeddings"
)
torch.ops.load_library("//deeplearning/fbgemm/fbgemm_gpu/codegen:embedding_ops")

torch.ops.load_library("//deeplearning/fbgemm/fbgemm_gpu:input_combine")
Expand Down
24 changes: 7 additions & 17 deletions fbgemm_gpu/test/merge_pooled_embeddings_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,34 +11,24 @@
import unittest
from typing import Tuple

import fbgemm_gpu

import hypothesis.strategies as st
import torch
from hypothesis import given, settings, Verbosity


try:
# pyre-ignore[21]
from fbgemm_gpu import open_source # noqa: F401
# pyre-fixme[16]: Module `fbgemm_gpu` has no attribute `open_source`.
open_source: bool = getattr(fbgemm_gpu, "open_source", False)

if open_source:
# pyre-ignore[21]
from test_utils import gpu_unavailable
except Exception:
if torch.version.hip:
torch.ops.load_library(
"//deeplearning/fbgemm/fbgemm_gpu:merge_pooled_embeddings_hip"
)
else:
torch.ops.load_library(
"//deeplearning/fbgemm/fbgemm_gpu:merge_pooled_embeddings"
)

torch.ops.load_library(
"//deeplearning/fbgemm/fbgemm_gpu:merge_pooled_embeddings_cpu"
)
else:
import fbgemm_gpu.sparse_ops # noqa: F401, E402
from fbgemm_gpu.test.test_utils import gpu_unavailable

open_source = False
torch.ops.load_library("//deeplearning/fbgemm/fbgemm_gpu:merge_pooled_embeddings")

typed_gpu_unavailable: Tuple[bool, str] = gpu_unavailable

Expand Down

0 comments on commit ec39ce8

Please sign in to comment.