Add support for jax.sharding.PositionalSharding in Orbax checkpointing.

Joint work with yaning@

PiperOrigin-RevId: 658124242
SiegeLordEx authored and Orbax Authors committed Jul 31, 2024
1 parent 3cc343c commit 1730661
Showing 2 changed files with 164 additions and 4 deletions.
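
For context, here is a minimal sketch of the round trip this change enables. The import path follows the file touched in this commit, and the device layout is an illustrative assumption:

import jax
from orbax.checkpoint.metadata import sharding as sharding_metadata

# Build a PositionalSharding over the available devices (assumes at least one device).
jax_sharding = jax.sharding.PositionalSharding(jax.devices()).reshape([1, -1])

# Convert to the new metadata type and serialize it to a JSON string.
metadata = sharding_metadata.from_jax_sharding(jax_sharding)
serialized = metadata.to_serialized_string()

# Restore the metadata from the string and rebuild a jax.sharding.PositionalSharding.
restored = sharding_metadata.from_serialized_string(serialized)
restored_sharding = restored.to_jax_sharding()
assert isinstance(restored_sharding, jax.sharding.PositionalSharding)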
67 changes: 66 additions & 1 deletion checkpoint/orbax/checkpoint/metadata/sharding.py
@@ -21,6 +21,7 @@
import logging
from typing import List, Optional, Tuple, Union
import jax
from jax.experimental import mesh_utils
import numpy as np

PartitionSpecElement = Union[None, str, Tuple[str, ...]]
@@ -31,6 +32,8 @@
_DEVICE_STR = 'device_str'
_MESH_AXES = 'axis_names'
_MESH_SHAPE = 'shape'
_DEVICES_SHAPE = 'shape'
_MEMORY_KIND = 'memory_kind'


class ShardingTypes(enum.Enum):
@@ -220,7 +223,60 @@ class GSPMDShardingMetadata(ShardingMetadata):

@dataclasses.dataclass
class PositionalShardingMetadata(ShardingMetadata):
pass
"""PositionalShardingMetadata representing `jax.sharding.PositionalSharding`."""
shape: np.ndarray
memory_kind: Optional[str] = None

@classmethod
def from_jax_sharding(
cls, jax_sharding: jax.sharding.PositionalSharding
) -> 'PositionalShardingMetadata':
return cls(
shape=np.array(list(jax_sharding.shape)),
memory_kind=jax_sharding.memory_kind,
)

def to_jax_sharding(self) -> jax.sharding.PositionalSharding:
return jax.sharding.PositionalSharding(
mesh_utils.create_device_mesh(self.shape),
memory_kind=self.memory_kind,
)

@classmethod
def from_deserialized_dict(
cls, deserialized_dict: dict[str, str]
) -> 'PositionalShardingMetadata':
if _DEVICES_SHAPE in deserialized_dict:
shape = np.array(deserialized_dict[_DEVICES_SHAPE])
memory_kind = deserialized_dict.get(_MEMORY_KIND, None)
return cls(
shape=shape,
memory_kind=memory_kind,
)
else:
raise ValueError(
f'Sharding data not found in deserialized_dict: {deserialized_dict}'
)

def to_serialized_string(self) -> str:
sharding_data = {}
sharding_data[_SHARDING_TYPE] = ShardingTypes.POSITIONAL_SHARDING.value
sharding_data[_DEVICES_SHAPE] = self.shape.tolist()
if self.memory_kind is not None:
sharding_data[_MEMORY_KIND] = self.memory_kind
return json.dumps(sharding_data)

def __repr__(self):
return (
f'PositionalShardingMetadata(shape={self.shape},'
f' memory_kind={self.memory_kind})'
)

def __eq__(self, other):
return (
np.array_equal(self.shape, other.shape)
and self.memory_kind == other.memory_kind
)


def from_jax_sharding(jax_sharding) -> Optional[ShardingMetadata]:
@@ -229,6 +285,8 @@ def from_jax_sharding(jax_sharding) -> Optional[ShardingMetadata]:
return NamedShardingMetadata.from_jax_sharding(jax_sharding)
elif isinstance(jax_sharding, jax.sharding.SingleDeviceSharding):
return SingleDeviceShardingMetadata.from_jax_sharding(jax_sharding)
elif isinstance(jax_sharding, jax.sharding.PositionalSharding):
return PositionalShardingMetadata.from_jax_sharding(jax_sharding)
else:
logging.warning(
'Conversion for %s has not been implemented.', type(jax_sharding)
@@ -247,6 +305,13 @@ def from_serialized_string(serialized_str) -> ShardingMetadata:
return SingleDeviceShardingMetadata.from_deserialized_dict(
deserialized_dict
)
elif (
deserialized_dict[_SHARDING_TYPE]
== ShardingTypes.POSITIONAL_SHARDING.value
):
return PositionalShardingMetadata.from_deserialized_dict(
deserialized_dict
)
else:
raise NotImplementedError(
f'Conversion for {deserialized_dict[_SHARDING_TYPE]} has not been'
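Note that PositionalShardingMetadata persists only the device-array shape and memory kind; to_jax_sharding rebuilds the device arrangement with mesh_utils.create_device_mesh from the stored shape, so the restored sharding uses whichever devices are available at load time. A short sketch of the serialized form (import path assumed from this commit, shape values illustrative):

import numpy as np
from orbax.checkpoint.metadata import sharding as sharding_metadata

# Construct the metadata directly and inspect its serialized JSON form.
meta = sharding_metadata.PositionalShardingMetadata(shape=np.array([1, 2]))
print(meta.to_serialized_string())
# {"sharding_type": "PositionalSharding", "shape": [1, 2]}
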
101 changes: 98 additions & 3 deletions checkpoint/orbax/checkpoint/metadata/sharding_test.py
@@ -46,6 +46,39 @@ def test_convert_between_jax_named_sharding_and_sharding_metadata(self):
self.assertIsInstance(converted_jax_sharding, jax.sharding.NamedSharding)
self.assertEqual(converted_jax_sharding, jax_sharding)

def test_convert_between_jax_positional_sharding_and_sharding_metadata(
self,
):
# Convert from `jax.sharding.PositionalSharding` to
# `PositionalShardingMetadata`
jax_sharding = jax.sharding.PositionalSharding(jax.devices()).reshape(
[1, -1]
)
expected_positional_sharding_metadata = (
sharding_metadata.PositionalShardingMetadata(jax_sharding.shape)
)
converted_positional_sharding_metadata = (
sharding_metadata.from_jax_sharding(jax_sharding)
)
self.assertIsInstance(
converted_positional_sharding_metadata,
sharding_metadata.PositionalShardingMetadata,
)
self.assertEqual(
converted_positional_sharding_metadata,
expected_positional_sharding_metadata,
)

# Convert from `PositionalShardingMetadata` to
# `jax.sharding.PositionalSharding`
converted_jax_sharding = (
converted_positional_sharding_metadata.to_jax_sharding()
)
self.assertIsInstance(
converted_jax_sharding, jax.sharding.PositionalSharding
)
self.assertEqual(converted_jax_sharding, jax_sharding)

def test_convert_between_jax_single_device_sharding_and_sharding_metadata(
self,
):
@@ -104,6 +137,67 @@ def test_convert_between_named_sharding_string_to_named_sharding_metadata(
)
self.assertEqual(converted_named_sharding_metadata, named_sharding_metadata)

def test_positional_sharding_string_to_metadata(
self,
):
# Convert from `PositionalShardingMetadata` to `str`
positional_sharding_metadata = sharding_metadata.PositionalShardingMetadata(
shape=np.array([1, 2])
)
expected_positional_sharding_string = (
'{"sharding_type": "PositionalSharding", "shape": [1, 2]}'
)
positional_sharding_string = (
positional_sharding_metadata.to_serialized_string()
)
self.assertEqual(
positional_sharding_string, expected_positional_sharding_string
)

# Convert from `str` to `PositionalShardingMetadata`
converted_positional_sharding_metadata = (
sharding_metadata.from_serialized_string(positional_sharding_string)
)
self.assertIsInstance(
converted_positional_sharding_metadata,
sharding_metadata.PositionalShardingMetadata,
)
self.assertEqual(
converted_positional_sharding_metadata,
positional_sharding_metadata,
)

def test_positional_sharding_string_to_metadata_with_memory_kind(
self,
):
# Convert from `PositionalShardingMetadata` to `str`
positional_sharding_metadata = sharding_metadata.PositionalShardingMetadata(
shape=np.array([1, 2]), memory_kind="foo"
)
expected_positional_sharding_string = (
'{"sharding_type": "PositionalSharding", "shape": [1, 2],'
' "memory_kind": "foo"}'
)
positional_sharding_string = (
positional_sharding_metadata.to_serialized_string()
)
self.assertEqual(
positional_sharding_string, expected_positional_sharding_string
)

# Convert from `str` to `PositionalShardingMetadata`
converted_positional_sharding_metadata = (
sharding_metadata.from_serialized_string(positional_sharding_string)
)
self.assertIsInstance(
converted_positional_sharding_metadata,
sharding_metadata.PositionalShardingMetadata,
)
self.assertEqual(
converted_positional_sharding_metadata,
positional_sharding_metadata,
)

def test_single_device_sharding_string_to_metadata(
self,
):
@@ -135,14 +229,15 @@ def test_single_device_sharding_string_to_metadata(
)

def test_convert_to_jax_sharding_unsupported_types(self):
jax_sharding = jax.sharding.PositionalSharding(jax.devices())
jax_sharding = jax.sharding.GSPMDSharding.get_replicated(jax.devices())
warning_message = (
"Conversion for <class 'jax._src.sharding_impls.PositionalSharding'>"
r"Conversion for <class '.*\.GSPMDSharding'>"
" has not been implemented."
)
with self.assertLogs(level="WARNING") as log_output:
sharding_metadata.from_jax_sharding(jax_sharding)
self.assertEqual(log_output[0][0].message, warning_message)
self.assertNotEmpty(log_output[0])
self.assertRegex(log_output[0][0].message, warning_message)


if __name__ == "__main__":
