Commit d373e67

[hotfix] resharding cost issue (#1742)
YuliangLiu0306 authored Oct 19, 2022
1 parent 24e84eb commit d373e67
Showing 1 changed file with 13 additions and 6 deletions.
@@ -1,14 +1,21 @@
 import operator
-from functools import reduce
 import warnings
+from copy import deepcopy
+from functools import reduce
+from typing import Dict, List
+
 import torch
+
+from colossalai.auto_parallel.tensor_shard.deprecated._utils import (
+    enumerate_all_possible_1d_sharding,
+    enumerate_all_possible_2d_sharding,
+    exception_handler,
+)
 from colossalai.auto_parallel.tensor_shard.deprecated.sharding_strategy import ShardingStrategy, StrategiesVector
-from .operator_handler import OperatorHandler
 from colossalai.tensor.shape_consistency import ShapeConsistencyManager
 from colossalai.tensor.sharding_spec import ShardingSpec
-from copy import deepcopy
-from typing import Dict, List
-from colossalai.auto_parallel.tensor_shard.deprecated._utils import exception_handler, enumerate_all_possible_1d_sharding, enumerate_all_possible_2d_sharding
+
+from .operator_handler import OperatorHandler
 
 __all__ = ['WhereHandler']

@@ -94,7 +101,7 @@ def _generate_resharding_costs(self, sharding_specs):
                 # compute the resharding cost
                 _, _, total_resharding_cost = shape_consistency_manager.shape_consistency(
                     input_sharding_spec, input_spec)
-
+                total_resharding_cost = total_resharding_cost['total']
                 # we need multiply the size of elem dtype to get correct communication cost
                 resharding_cost = total_resharding_cost * size_per_elem_bytes
                 resharding_costs[input_node].append(resharding_cost)
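
The functional change is the single added line in _generate_resharding_costs: shape_consistency returns its resharding cost as a dict, and the handler must pull out the 'total' entry before scaling it by the element size. Below is a minimal standalone sketch of the fixed computation; the function name resharding_cost_in_bytes and the parameter target_spec are hypothetical, and the dict-with-'total'-key return shape is inferred from this diff rather than from documented API behavior.

    def resharding_cost_in_bytes(shape_consistency_manager, input_sharding_spec,
                                 target_spec, size_per_elem_bytes):
        # shape_consistency(...) returns a 3-tuple; as in the diff, only the
        # third element (the resharding cost) is used here.
        _, _, total_resharding_cost = shape_consistency_manager.shape_consistency(
            input_sharding_spec, target_spec)
        # The hotfix: the cost is a dict (inferred from this diff), so select
        # the aggregate 'total' entry instead of treating the dict as a number.
        total_resharding_cost = total_resharding_cost['total']
        # Scale by the element size so the cost is in bytes, matching the
        # communication-cost units (per the in-diff comment).
        return total_resharding_cost * size_per_elem_bytes

The surrounding loop then appends this byte-denominated cost to resharding_costs[input_node] for each candidate strategy, exactly as the unchanged tail of the hunk shows.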
