Remove a bit of Python 2.7 code (Azure#38735)
* Remove a bit of Python 2.7 code

* Fix typo

* Remove _str consistently

* More replacements

* fix mock

* fix

* reset stuff

* Reset changes in legacy servicemanagement

* fix _str

* fix

* pylint

---------

Co-authored-by: Mads Jensen <[email protected]>
atombrella authored Dec 20, 2024
1 parent 980fe42 commit bf50574
Showing 21 changed files with 76 additions and 224 deletions.
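The recurring pattern removed across these files is the Python 2/3 text-type shim. For context, a minimal stand-alone illustration of the shim and of why plain str replaces it on Python 3; this is illustrative, not the exact repository code:

import sys

PY2 = sys.version_info[0] == 2

if PY2:
    _str = unicode  # Python 2's unicode text type  # pylint: disable=undefined-variable
else:
    _str = str

# On Python 3 the first branch is dead code: str already is the unicode text
# type, so every _str(value) call site can become str(value) and the shim
# plus the sys.version_info check disappear.
assert _str is str
assert _str(42) == "42"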
(first changed file)
@@ -25,19 +25,10 @@
- Null.
"""

import abc
from abc import ABCMeta, abstractmethod
import json
import logging
import re
import sys
from six import with_metaclass

PY2 = sys.version_info[0] == 2

if PY2:
_str = unicode # pylint: disable=undefined-variable
else:
_str = str

logger = logging.getLogger(__name__)

@@ -140,7 +131,7 @@ class SchemaParseException(AvroException):
"""Error while parsing a JSON schema descriptor."""


class Schema(with_metaclass(abc.ABCMeta, object)):
class Schema(metaclass=ABCMeta):
"""Abstract base class for all Schema classes."""

def __init__(self, data_type, other_props=None):
@@ -198,7 +189,7 @@ def __str__(self):
"""Returns: the JSON representation of this schema."""
return json.dumps(self.to_json(names=None))

@abc.abstractmethod
@abstractmethod
def to_json(self, names):
"""Converts the schema object into its AVRO specification representation.
@@ -286,7 +277,7 @@ def fullname(self):
# ------------------------------------------------------------------------------


class Names(object):
class Names:
"""Tracks Avro named schemas and default namespace during parsing."""

def __init__(self, default_namespace=None, names=None):
@@ -447,7 +438,7 @@ def name_ref(self, names):
return self.name
return self.fullname

@abc.abstractmethod
@abstractmethod
def to_json(self, names):
"""Converts the schema object into its AVRO specification representation.
@@ -489,7 +480,7 @@ def __init__(
doc:
other_props:
"""
if (not isinstance(name, _str)) or (not name):
if (not isinstance(name, str)) or (not name):
raise SchemaParseException('Invalid record field name: %r.' % name)
if (order is not None) and (order not in VALID_FIELD_SORT_ORDERS):
raise SchemaParseException('Invalid record field order: %r.' % order)
@@ -564,8 +555,8 @@ def to_json(self, names=None):
return to_dump

def __eq__(self, that):
to_cmp = json.loads(_str(self))
return to_cmp == json.loads(_str(that))
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))


# ------------------------------------------------------------------------------
@@ -679,7 +670,7 @@ def __init__(
symbols = tuple(symbols)
symbol_set = frozenset(symbols)
if (len(symbol_set) != len(symbols)
or not all(map(lambda symbol: isinstance(symbol, _str), symbols))):
or not all(map(lambda symbol: isinstance(symbol, str), symbols))):
raise AvroException(
'Invalid symbols for enum schema: %r.' % (symbols,))

@@ -747,8 +738,8 @@ def to_json(self, names=None):
return to_dump

def __eq__(self, that):
to_cmp = json.loads(_str(self))
return to_cmp == json.loads(_str(that))
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))


# ------------------------------------------------------------------------------
@@ -784,8 +775,8 @@ def to_json(self, names=None):
return to_dump

def __eq__(self, that):
to_cmp = json.loads(_str(self))
return to_cmp == json.loads(_str(that))
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))


# ------------------------------------------------------------------------------
@@ -841,8 +832,8 @@ def to_json(self, names=None):
return to_dump

def __eq__(self, that):
to_cmp = json.loads(_str(self))
return to_cmp == json.loads(_str(that))
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))


# ------------------------------------------------------------------------------
@@ -1031,8 +1022,8 @@ def to_json(self, names=None):
return to_dump

def __eq__(self, that):
to_cmp = json.loads(_str(self))
return to_cmp == json.loads(_str(that))
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))


# ------------------------------------------------------------------------------
@@ -1164,7 +1155,7 @@ def MakeFields(names):

# Parsers for the JSON data types:
_JSONDataParserTypeMap = {
_str: _schema_from_json_string,
str: _schema_from_json_string,
list: _schema_from_json_array,
dict: _schema_from_json_object,
}
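The schema.py hunks above also drop six.with_metaclass in favour of the native metaclass keyword and import abstractmethod directly. A minimal sketch with a hypothetical Shape class (not the repository's Schema) showing that the two spellings declare the same abstract base class:

from abc import ABCMeta, abstractmethod

# Old, Python 2 compatible spelling (requires the six package):
#   from six import with_metaclass
#   class Shape(with_metaclass(ABCMeta, object)):
#       ...

# Python 3-only spelling used by this commit:
class Shape(metaclass=ABCMeta):
    @abstractmethod
    def area(self):
        """Subclasses must provide an implementation."""


class Square(Shape):
    def __init__(self, side):
        self.side = side

    def area(self):
        return self.side * self.side


# ABCMeta still refuses to instantiate classes with unimplemented abstract methods:
# Shape()  -> TypeError
print(Square(3).area())  # 9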
(next changed file)
@@ -6,15 +6,6 @@

import sys

if sys.version_info < (3,):
def _str(value):
if isinstance(value, unicode): # pylint: disable=undefined-variable
return value.encode('utf-8')

return str(value)
else:
_str = str


def _to_utc_datetime(value):
return value.strftime('%Y-%m-%dT%H:%M:%SZ')
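For reference, the helper left behind in this parser module simply formats a datetime with strftime; a small usage sketch, assuming a naive datetime that already represents UTC:

from datetime import datetime

def _to_utc_datetime(value):
    return value.strftime('%Y-%m-%dT%H:%M:%SZ')

print(_to_utc_datetime(datetime(2024, 12, 20, 8, 30, 0)))  # 2024-12-20T08:30:00Z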
(next changed file)
@@ -15,20 +15,12 @@
import types
from typing import Any, TYPE_CHECKING
from wsgiref.handlers import format_date_time
try:
from urllib.parse import (
urlparse,
parse_qsl,
urlunparse,
urlencode,
)
except ImportError:
from urllib import urlencode # type: ignore
from urlparse import ( # type: ignore
urlparse,
parse_qsl,
urlunparse,
)
from urllib.parse import (
urlparse,
parse_qsl,
urlunparse,
urlencode,
)

from azure.core.pipeline.policies import (
HeadersPolicy,
@@ -41,10 +33,6 @@

from .models import LocationMode

try:
_unicode_type = unicode # type: ignore
except NameError:
_unicode_type = str

if TYPE_CHECKING:
from azure.core.pipeline import PipelineRequest, PipelineResponse
@@ -54,8 +42,6 @@


def encode_base64(data):
if isinstance(data, _unicode_type):
data = data.encode('utf-8')
encoded = base64.b64encode(data)
return encoded.decode('utf-8')

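Two independent simplifications in the client module above: the urllib imports no longer need an ImportError fallback because urllib.parse always exists on Python 3, and encode_base64 drops its unicode check, which implies callers are now expected to pass bytes. An illustrative sketch with a hypothetical caller, not the exact client code:

import base64
from urllib.parse import parse_qsl, urlencode  # stdlib on Python 3, no fallback needed


def encode_base64(data):
    # The removed isinstance(data, unicode) branch means text is no longer
    # re-encoded here; pass bytes (encode str at the call site).
    encoded = base64.b64encode(data)
    return encoded.decode('utf-8')


print(encode_base64(b"hello"))                   # aGVsbG8=
print(encode_base64("hello".encode("utf-8")))    # caller encodes str itself
print(urlencode(dict(parse_qsl("a=1&b=2"))))     # a=1&b=2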
(next changed file)
@@ -6,7 +6,7 @@

from datetime import date

from .parser import _str, _to_utc_datetime
from .parser import _to_utc_datetime
from .constants import X_MS_VERSION
from . import sign_string, url_quote

(next changed file)
@@ -29,15 +29,7 @@
import json
import logging
import re
import sys
from six import with_metaclass

PY2 = sys.version_info[0] == 2

if PY2:
_str = unicode # pylint: disable=undefined-variable
else:
_str = str

logger = logging.getLogger(__name__)

@@ -140,7 +132,7 @@ class SchemaParseException(AvroException):
"""Error while parsing a JSON schema descriptor."""


class Schema(with_metaclass(abc.ABCMeta, object)):
class Schema(metaclass=abc.ABCMeta):
"""Abstract base class for all Schema classes."""

def __init__(self, data_type, other_props=None):
@@ -154,9 +146,8 @@ def __init__(self, data_type, other_props=None):
raise SchemaParseException('%r is not a valid Avro type.' % data_type)

# All properties of this schema, as a map: property name -> property value
self._props = {}
self._props = {'type': data_type}

self._props['type'] = data_type
self._type = data_type

if other_props:
@@ -222,7 +213,7 @@ def to_json(self, names):
)


class Name(object):
class Name:
"""Representation of an Avro name."""

def __init__(self, name, namespace=None):
@@ -489,9 +480,9 @@ def __init__(
doc:
other_props:
"""
if (not isinstance(name, _str)) or (not name):
if not isinstance(name, str) or not name:
raise SchemaParseException('Invalid record field name: %r.' % name)
if (order is not None) and (order not in VALID_FIELD_SORT_ORDERS):
if order is not None and order not in VALID_FIELD_SORT_ORDERS:
raise SchemaParseException('Invalid record field order: %r.' % order)

# All properties of this record field:
@@ -564,8 +555,8 @@ def to_json(self, names=None):
return to_dump

def __eq__(self, that):
to_cmp = json.loads(_str(self))
return to_cmp == json.loads(_str(that))
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))


# ------------------------------------------------------------------------------
@@ -679,7 +670,7 @@ def __init__(
symbols = tuple(symbols)
symbol_set = frozenset(symbols)
if (len(symbol_set) != len(symbols)
or not all(map(lambda symbol: isinstance(symbol, _str), symbols))):
or not all(map(lambda symbol: isinstance(symbol, str), symbols))):
raise AvroException(
'Invalid symbols for enum schema: %r.' % (symbols,))

@@ -747,8 +738,8 @@ def to_json(self, names=None):
return to_dump

def __eq__(self, that):
to_cmp = json.loads(_str(self))
return to_cmp == json.loads(_str(that))
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))


# ------------------------------------------------------------------------------
@@ -784,8 +775,8 @@ def to_json(self, names=None):
return to_dump

def __eq__(self, that):
to_cmp = json.loads(_str(self))
return to_cmp == json.loads(_str(that))
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))


# ------------------------------------------------------------------------------
@@ -841,8 +832,8 @@ def to_json(self, names=None):
return to_dump

def __eq__(self, that):
to_cmp = json.loads(_str(self))
return to_cmp == json.loads(_str(that))
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))


# ------------------------------------------------------------------------------
@@ -1031,8 +1022,8 @@ def to_json(self, names=None):
return to_dump

def __eq__(self, that):
to_cmp = json.loads(_str(self))
return to_cmp == json.loads(_str(that))
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))


# ------------------------------------------------------------------------------
@@ -1164,7 +1155,7 @@ def MakeFields(names):

# Parsers for the JSON data types:
_JSONDataParserTypeMap = {
_str: _schema_from_json_string,
str: _schema_from_json_string,
list: _schema_from_json_array,
dict: _schema_from_json_object,
}
(next changed file)
@@ -14,21 +14,13 @@
import uuid
import types
from typing import Any, TYPE_CHECKING
from urllib.parse import (
urlparse,
parse_qsl,
urlunparse,
urlencode,
)
from wsgiref.handlers import format_date_time
try:
from urllib.parse import (
urlparse,
parse_qsl,
urlunparse,
urlencode,
)
except ImportError:
from urllib import urlencode # type: ignore
from urlparse import ( # type: ignore
urlparse,
parse_qsl,
urlunparse,
)

from azure.core.pipeline.policies import (
HeadersPolicy,
@@ -41,10 +33,6 @@

from .models import LocationMode

try:
_unicode_type = unicode # type: ignore
except NameError:
_unicode_type = str

if TYPE_CHECKING:
from azure.core.pipeline import PipelineRequest, PipelineResponse
@@ -54,8 +42,6 @@


def encode_base64(data):
if isinstance(data, _unicode_type):
data = data.encode('utf-8')
encoded = base64.b64encode(data)
return encoded.decode('utf-8')

(next changed file)
@@ -1,8 +1,8 @@
import logging
import os
from unittest import mock
from unittest.mock import Mock, patch

import mock
import pytest
from test_utilities.constants import Test_Resource_Group, Test_Subscription

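The test hunk above replaces the third-party mock backport with the standard-library module; on Python 3 the API is effectively identical, so only the import changes. A small sketch with a hypothetical target, not the actual test module:

import os
from unittest import mock            # was: import mock
from unittest.mock import Mock, patch


def get_region(client):
    return client.location.region


fake = Mock()
fake.location.region = "westeurope"
assert get_region(fake) == "westeurope"

# patch() behaves exactly as it did with the old third-party package:
with patch("os.getenv", return_value="dummy-sub") as fake_getenv:
    assert os.getenv("AZURE_SUBSCRIPTION_ID") == "dummy-sub"
fake_getenv.assert_called_once_with("AZURE_SUBSCRIPTION_ID")

# mock.call and friends come from the same namespace:
assert fake_getenv.call_args == mock.call("AZURE_SUBSCRIPTION_ID")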
(remaining file diffs not captured)