From c32d2017c216695657bd0ac1c8e53ef88b354fea Mon Sep 17 00:00:00 2001
From: Ali Afzal
Date: Thu, 2 Oct 2025 01:23:19 -0700
Subject: [PATCH 1/2] Fixing pre-commit issue from embedding_kernel (#3409)

Summary:
Fixes a pre-commit issue introduced by change D79659949; see the failing CI run:
https://github.com/meta-pytorch/torchrec/actions/runs/18180801024/job/51756057934

Differential Revision: D83727377
---
 torchrec/distributed/embedding_kernel.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/torchrec/distributed/embedding_kernel.py b/torchrec/distributed/embedding_kernel.py
index e444f59c8..09a2c0375 100644
--- a/torchrec/distributed/embedding_kernel.py
+++ b/torchrec/distributed/embedding_kernel.py
@@ -105,7 +105,10 @@ def create_virtual_table_global_metadata(
         # The param size only has the information for my_rank. In order to
         # correctly calculate the size for other ranks, we need to use the current
         # rank's shard size compared to the shard size of my_rank.
-        curr_rank_rows = (param.size()[0] * metadata.shards_metadata[rank].shard_sizes[0]) // my_rank_shard_size  # pyre-ignore[16]
+        curr_rank_rows = (
+            param.size()[0]  # pyre-ignore[16]
+            * metadata.shards_metadata[rank].shard_sizes[0]
+        ) // my_rank_shard_size
     else:
         curr_rank_rows = (
             weight_count_per_rank[rank] if weight_count_per_rank is not None else 1

From dee9474ca00fbeabc88ca87006d508b98508ad6d Mon Sep 17 00:00:00 2001
From: Ali Afzal
Date: Thu, 2 Oct 2025 01:23:19 -0700
Subject: [PATCH 2/2] Fix Python Flake8 lint error E721. (#3402)

Summary:
Flake8 error E721 is raised when the type() function is used for type
comparison instead of the more flexible isinstance(), which also matches
subclasses. The codemod only replaces type() comparisons used for type
checks with isinstance() calls, preserving the existing behavior while
making the code easier to extend.

Differential Revision: D83559508
---
 torchrec/distributed/tests/test_dynamic_sharding.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/torchrec/distributed/tests/test_dynamic_sharding.py b/torchrec/distributed/tests/test_dynamic_sharding.py
index 9b06db0b6..855c8cc20 100644
--- a/torchrec/distributed/tests/test_dynamic_sharding.py
+++ b/torchrec/distributed/tests/test_dynamic_sharding.py
@@ -206,7 +206,7 @@ def are_sharded_ebc_modules_identical(
         val2 = getattr(module2, attr)
 
         assert type(val1) is type(val2)
-        if type(val1) is torch.Tensor:
+        if isinstance(val1, torch.Tensor):
             torch.testing.assert_close(val1, val2)
         else:
             assert val1 == val2
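
Note on the row calculation in PATCH 1/2: the reformatted expression scales
my_rank's parameter row count by the ratio of each rank's shard size to
my_rank's shard size. A minimal sketch of that arithmetic, with hypothetical
values (the names and numbers below are illustrative, not taken from torchrec):

    # Hypothetical values standing in for param.size()[0],
    # metadata.shards_metadata[rank].shard_sizes[0], and my_rank_shard_size.
    param_rows = 100             # rows of the param as seen on my_rank
    my_rank_shard_size = 50      # shard row count recorded for my_rank
    shard_sizes = [50, 25, 25]   # per-rank shard row counts

    # Each rank's row count is my_rank's param rows scaled by the ratio of
    # that rank's shard size to my_rank's shard size (integer division).
    curr_rank_rows = [
        (param_rows * size) // my_rank_shard_size for size in shard_sizes
    ]
    print(curr_rank_rows)  # [100, 50, 50]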
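
Note on E721 in PATCH 2/2: isinstance() matches subclasses, while an exact
type() comparison does not. A minimal standalone sketch of the difference;
torch.nn.Parameter is used here only as a convenient torch.Tensor subclass:

    import torch

    # torch.nn.Parameter is a subclass of torch.Tensor.
    t = torch.nn.Parameter(torch.zeros(2))

    # Exact-type comparison (the E721 pattern): misses the subclass.
    print(type(t) is torch.Tensor)       # False

    # isinstance() accepts subclasses, which is what the test intends.
    print(isinstance(t, torch.Tensor))   # True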