Merge branch 'Azure:main' into main
v-rvilathurs committed Jul 5, 2024
2 parents 429e346 + dd53313 commit ff346c2
Showing 452 changed files with 78,461 additions and 47,535 deletions.
24 changes: 24 additions & 0 deletions linter_exclusions.yml
@@ -438,6 +438,16 @@ databox job mark-devices-shipped:
deliver_package_details:
rule_exclusions:
- option_length_too_long
databricks access-connector create:
parameters:
user_assigned_identities:
rule_exclusions:
- option_length_too_long
databricks access-connector update:
parameters:
user_assigned_identities:
rule_exclusions:
- option_length_too_long
databricks workspace create:
parameters:
managed_resource_group:
@@ -446,6 +456,20 @@ databricks workspace create:
require_infrastructure_encryption:
rule_exclusions:
- option_length_too_long
default_storage_firewall:
rule_exclusions:
- option_length_too_long
enhanced_security_compliance:
rule_exclusions:
- option_length_too_long
databricks workspace update:
parameters:
default_storage_firewall:
rule_exclusions:
- option_length_too_long
enhanced_security_compliance:
rule_exclusions:
- option_length_too_long
databricks workspace vnet-peering create:
parameters:
allow_forwarded_traffic:
5 changes: 3 additions & 2 deletions scripts/ci/release_version_cal.py
@@ -7,6 +7,7 @@
# pylint: disable=line-too-long
import os
import re
import json
from packaging.version import parse

from azdev.utilities.path import get_cli_repo_path, get_ext_repo_paths
@@ -22,8 +23,8 @@
changed_module_list = os.environ.get('changed_module_list', "").split()
diff_code_file = os.environ.get('diff_code_file', "")
print("diff_code_file:", diff_code_file)
pr_label_list = os.environ.get('pr_label_list', "").split()
pr_label_list = [name.lower().strip().strip('"').strip("'") for name in pr_label_list]
pr_label_list = os.environ.get('pr_label_list', "")
pr_label_list = [name.lower().strip().strip('"').strip("'") for name in json.loads(pr_label_list)]

DEFAULT_VERSION = "0.0.0"
INIT_RELEASE_VERSION = "1.0.0b1"
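
The hunk above switches pr_label_list from a whitespace split to json.loads, so PR labels that contain spaces survive as single entries. A minimal standalone sketch of the difference, using a hypothetical value for the pr_label_list environment variable, might look like this:

import json

# Hypothetical JSON payload for the pr_label_list environment variable.
raw = '["Auto-Release", "do not merge"]'

# Old behaviour: str.split() breaks a multi-word label apart.
old_labels = [name.lower().strip().strip('"').strip("'") for name in raw.split()]
# -> ['["auto-release",', 'do', 'not', 'merge"]']

# New behaviour: json.loads keeps each label intact.
new_labels = [name.lower().strip().strip('"').strip("'") for name in json.loads(raw)]
# -> ['auto-release', 'do not merge']
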
5 changes: 5 additions & 0 deletions src/aks-preview/HISTORY.rst
@@ -11,6 +11,11 @@ To release a new version, please select a new version number (usually plus 1 to

Pending
+++++++
* Add validation to `az aks create` and `az aks update` while modifying the `--ephemeral-disk-volume-type` and `--ephemeral-disk-nvme-perf-tier` values.

5.0.0b4
++++++++
* Add additional unit test cases for mutable fips flags in agentpool update.

5.0.0b3
++++++++
@@ -303,20 +303,37 @@ def validate_enable_azure_container_storage_params( # pylint: disable=too-many-
f'already enabled for storage pool option {enabled_options}.'
)
else:
if ephemeral_disk_volume_type is not None and ephemeral_disk_nvme_perf_tier is None and \
if required_type_installed_for_disk_vol_type and \
ephemeral_disk_volume_type is not None and \
ephemeral_disk_nvme_perf_tier is None and \
existing_ephemeral_disk_volume_type.lower() == ephemeral_disk_volume_type.lower():
raise InvalidArgumentValueError(
'Azure Container Storage is already configured with --ephemeral-disk-volume-type '
f'value set to {existing_ephemeral_disk_volume_type}.'
)

if ephemeral_disk_nvme_perf_tier is not None and ephemeral_disk_volume_type is None and \
if required_type_installed_for_nvme_perf_tier and \
ephemeral_disk_nvme_perf_tier is not None and \
ephemeral_disk_volume_type is None and \
existing_ephemeral_disk_nvme_perf_tier.lower() == ephemeral_disk_nvme_perf_tier.lower():
raise InvalidArgumentValueError(
'Azure Container Storage is already configured with --ephemeral-disk-nvme-perf-tier '
f'value set to {existing_ephemeral_disk_nvme_perf_tier}.'
)

# pylint: disable=too-many-boolean-expressions
if required_type_installed_for_disk_vol_type and \
ephemeral_disk_volume_type is not None and \
existing_ephemeral_disk_volume_type.lower() == ephemeral_disk_volume_type.lower() and \
required_type_installed_for_nvme_perf_tier and \
ephemeral_disk_nvme_perf_tier is not None and \
existing_ephemeral_disk_nvme_perf_tier.lower() == ephemeral_disk_nvme_perf_tier.lower():
raise InvalidArgumentValueError(
'Azure Container Storage is already configured with --ephemeral-disk-volume-type '
f'value set to {existing_ephemeral_disk_volume_type} and --ephemeral-disk-nvme-perf-tier '
f'value set to {existing_ephemeral_disk_nvme_perf_tier}.'
)

if storage_pool_option == CONST_ACSTOR_ALL:
raise InvalidArgumentValueError(
f'Cannot set --storage-pool-option value as {CONST_ACSTOR_ALL} '
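
The new guards above make validate_enable_azure_container_storage_params reject a request whose ephemeral disk settings match what is already configured, with a dedicated message when both values are duplicates. A simplified, standalone sketch of that check (not the extension's actual function or signature) might look like this:

class InvalidArgumentValueError(Exception):
    """Stand-in for the Azure CLI error type used by the validator."""


def check_ephemeral_disk_settings(disk_volume_type, nvme_perf_tier,
                                  existing_volume_type, existing_perf_tier,
                                  type_installed_for_vol_type, type_installed_for_perf_tier):
    # True when the requested value is present and equal (case-insensitively)
    # to the value already installed.
    same_vol_type = (type_installed_for_vol_type and disk_volume_type is not None and
                     existing_volume_type.lower() == disk_volume_type.lower())
    same_perf_tier = (type_installed_for_perf_tier and nvme_perf_tier is not None and
                      existing_perf_tier.lower() == nvme_perf_tier.lower())

    if same_vol_type and same_perf_tier:
        raise InvalidArgumentValueError(
            'Azure Container Storage is already configured with --ephemeral-disk-volume-type '
            f'value set to {existing_volume_type} and --ephemeral-disk-nvme-perf-tier '
            f'value set to {existing_perf_tier}.')
    if same_vol_type and nvme_perf_tier is None:
        raise InvalidArgumentValueError(
            'Azure Container Storage is already configured with --ephemeral-disk-volume-type '
            f'value set to {existing_volume_type}.')
    if same_perf_tier and disk_volume_type is None:
        raise InvalidArgumentValueError(
            'Azure Container Storage is already configured with --ephemeral-disk-nvme-perf-tier '
            f'value set to {existing_perf_tier}.')

The three new cases added to test_validators.py further down exercise exactly these three error branches.
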

1,276 changes: 454 additions & 822 deletions ...iew/tests/latest/recordings/test_aks_create_with_pod_ip_allocation_mode_static_block.yaml
100644 → 100755

@@ -4684,6 +4684,16 @@ def test_aks_create_update_fips_flow(self, resource_group, resource_group_locati
],
)

# verify no flag no change
self.cmd(
"aks nodepool update --resource-group={resource_group} --cluster-name={name} --name={node_pool_name} "
'--aks-custom-headers AKSHTTPCustomFeatures=Microsoft.ContainerService/MutableFipsPreview',
checks=[
self.check("provisioningState", "Succeeded"),
self.check("enableFips", True),
],
)

# verify same update no change
self.cmd(
"aks nodepool update --resource-group={resource_group} --cluster-name={name} --name={node_pool_name} "
@@ -4720,6 +4730,16 @@
],
)

# verify no flag no change
self.cmd(
"aks nodepool update --resource-group={resource_group} --cluster-name={name} --name={node_pool_name_second} "
'--aks-custom-headers AKSHTTPCustomFeatures=Microsoft.ContainerService/MutableFipsPreview',
checks=[
self.check("provisioningState", "Succeeded"),
self.check("enableFips", False),
],
)

# verify same update no change
self.cmd(
"aks nodepool update --resource-group={resource_group} --cluster-name={name} --name={node_pool_name_second} "
41 changes: 41 additions & 0 deletions src/aks-preview/azext_aks_preview/tests/latest/test_validators.py
@@ -969,6 +969,47 @@ def test_enable_with_ephemeral_disk_nvme_perf_tier_and_ephemeral_temp_disk_pool(
)
self.assertEqual(str(cm.exception), err)

def test_enable_with_same_ephemeral_disk_nvme_perf_tier_already_set(self):
perf_tier = acstor_consts.CONST_EPHEMERAL_NVME_PERF_TIER_PREMIUM
storage_pool_type = acstor_consts.CONST_STORAGE_POOL_TYPE_EPHEMERAL_DISK
err = (
"Azure Container Storage is already configured with --ephemeral-disk-nvme-perf-tier "
f"value set to {perf_tier}."
)
with self.assertRaises(InvalidArgumentValueError) as cm:
acstor_validator.validate_enable_azure_container_storage_params(
storage_pool_type, None, None, None, None, None, None, True, False, False, False, True, None, perf_tier, acstor_consts.CONST_DISK_TYPE_PV_WITH_ANNOTATION, acstor_consts.CONST_EPHEMERAL_NVME_PERF_TIER_PREMIUM
)
self.assertEqual(str(cm.exception), err)

def test_enable_with_same_ephemeral_disk_volume_type_already_set(self):
disk_vol_type = acstor_consts.CONST_DISK_TYPE_PV_WITH_ANNOTATION
storage_pool_type = acstor_consts.CONST_STORAGE_POOL_TYPE_EPHEMERAL_DISK
err = (
"Azure Container Storage is already configured with --ephemeral-disk-volume-type "
f"value set to {disk_vol_type}."
)
with self.assertRaises(InvalidArgumentValueError) as cm:
acstor_validator.validate_enable_azure_container_storage_params(
storage_pool_type, None, None, None, None, None, None, True, False, False, False, True, disk_vol_type, None, acstor_consts.CONST_DISK_TYPE_PV_WITH_ANNOTATION, acstor_consts.CONST_EPHEMERAL_NVME_PERF_TIER_PREMIUM
)
self.assertEqual(str(cm.exception), err)

def test_enable_with_same_ephemeral_disk_nvme_perf_tier_and_ephemeral_temp_disk_pool_already_set(self):
perf_tier = acstor_consts.CONST_EPHEMERAL_NVME_PERF_TIER_STANDARD
disk_vol_type = acstor_consts.CONST_DISK_TYPE_PV_WITH_ANNOTATION
storage_pool_type = acstor_consts.CONST_STORAGE_POOL_TYPE_EPHEMERAL_DISK
err = (
"Azure Container Storage is already configured with --ephemeral-disk-volume-type "
f"value set to {disk_vol_type} and --ephemeral-disk-nvme-perf-tier "
f"value set to {perf_tier}."
)
with self.assertRaises(InvalidArgumentValueError) as cm:
acstor_validator.validate_enable_azure_container_storage_params(
storage_pool_type, None, None, None, None, None, None, True, False, False, False, True, disk_vol_type, perf_tier, acstor_consts.CONST_DISK_TYPE_PV_WITH_ANNOTATION, acstor_consts.CONST_EPHEMERAL_NVME_PERF_TIER_STANDARD
)
self.assertEqual(str(cm.exception), err)

def test_enable_with_option_all_and_ephemeral_disk_pool(self):
storage_pool_name = "valid-name"
storage_pool_option = acstor_consts.CONST_ACSTOR_ALL
2 changes: 1 addition & 1 deletion src/aks-preview/setup.py
@@ -9,7 +9,7 @@

from setuptools import setup, find_packages

VERSION = "5.0.0b3"
VERSION = "5.0.0b4"

CLASSIFIERS = [
"Development Status :: 4 - Beta",
6 changes: 5 additions & 1 deletion src/amg/HISTORY.rst
@@ -64,4 +64,8 @@ Release History

1.3.4
++++++
* `az grafana dashboard sync`: use case-insensitive comparison for library panel folders
* `az grafana dashboard sync`: use case-insensitive comparison for library panel folders

1.3.5
++++++
* `az grafana dashboard sync`: fix version mismatch issue for library panel sync
16 changes: 10 additions & 6 deletions src/amg/azext_amg/sync.py
@@ -126,7 +126,7 @@ def sync(cmd, source, destination, folders_to_include=None, folders_to_exclude=N
continue

# Figure out whether we shall correct the data sources. It is possible the Uids are different
remap_datasource_uids(source_dashboard.get("dashboard"), uid_mapping, data_source_missed)
remap_datasource_uids(source_dashboard["dashboard"], uid_mapping, data_source_missed)

if not dry_run:
delete_dashboard(cmd, destination_workspace, dashboard_uid,
@@ -144,6 +144,7 @@ def sync(cmd, source, destination, folders_to_include=None, folders_to_exclude=N
continue

panel_name = content["result"]['name']
panel_folder_uid = content["result"]["folderUid"]
panel_folder_name = content["result"]["meta"]["folderName"]

# user error case where library panel in dashboard is not in an excluded folder
@@ -160,19 +161,22 @@ def sync(cmd, source, destination, folders_to_include=None, folders_to_exclude=N

if not dry_run:
logger.info("Syncing library panel: %s", panel_folder_name + "/" + panel_name)
endpoint = f'{destination_endpoint}/api/library-elements/'
payload = {
'uid': content["result"]["uid"],
'folderUid': content["result"]["folderUid"],
'folderUid': panel_folder_uid if panel_folder_uid != 'general' else '',
'name': panel_name,
'model': content["result"]["model"],
'kind': content["result"]["kind"],
}
(status, content) = send_grafana_post(f'{destination_endpoint}/api/library-elements/',
json.dumps(payload), http_headers)
(status, content) = send_grafana_post(endpoint, json.dumps(payload), http_headers)
if status >= 400:
if 'name or UID already exists' in content.get('message', ''):
send_grafana_patch(f'{destination_endpoint}/api/library-elements/{library_panel_uid}',
json.dumps(payload), http_headers)
endpoint = f'{destination_endpoint}/api/library-elements/{library_panel_uid}'
(status, content) = send_grafana_get(endpoint, http_headers)

payload["version"] = content["result"]["version"] # avoid version mismatch
(status, content) = send_grafana_patch(endpoint, json.dumps(payload), http_headers)
else:
logger.error(json.dumps(content))

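
The sync.py change above deals with Grafana's optimistic locking for library elements: when the POST reports that the panel already exists, the code re-reads the element on the destination, copies its current version into the payload, and PATCHes it, which avoids the version mismatch mentioned in the 1.3.5 changelog entry. A hedged sketch of that create-or-update flow, assuming the send_grafana_post/get/patch helpers are available with the signatures shown in the diff, might look like this:

import json

# send_grafana_post / send_grafana_get / send_grafana_patch are assumed to be
# the extension's own HTTP helpers, called with the signatures shown above.
def upsert_library_panel(destination_endpoint, payload, library_panel_uid, http_headers):
    # Try to create the library element first.
    endpoint = f'{destination_endpoint}/api/library-elements/'
    (status, content) = send_grafana_post(endpoint, json.dumps(payload), http_headers)

    if status >= 400 and 'name or UID already exists' in content.get('message', ''):
        # The element exists: fetch its current version so the PATCH passes
        # Grafana's version check, then update it in place.
        endpoint = f'{destination_endpoint}/api/library-elements/{library_panel_uid}'
        (status, content) = send_grafana_get(endpoint, http_headers)
        payload["version"] = content["result"]["version"]  # avoid version mismatch
        (status, content) = send_grafana_patch(endpoint, json.dumps(payload), http_headers)

    return status, content
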