Skip to content
This repository was archived by the owner on Feb 3, 2021. It is now read-only.

Commit eef36dc

Browse files
authored
Feature: 0.9.0 deprecated code removal (#645)
* remove deprecated code * remove deprecated tests, yapf test directory * add import * remove unused test * remove deprecated field name in tests * update test parameter to non deprecated name
1 parent 9d554c3 commit eef36dc

File tree

18 files changed

+150
-220
lines changed

18 files changed

+150
-220
lines changed

aztk/models/cluster_configuration.py

Lines changed: 1 addition & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import aztk.error as error
22
from aztk.core.models import Model, fields
3-
from aztk.utils import deprecate, deprecated, helpers
3+
from aztk.utils import helpers
44

55
from .custom_script import CustomScript
66
from .file_share import FileShare
@@ -41,37 +41,8 @@ class ClusterConfiguration(Model):
4141
scheduling_target = fields.Enum(SchedulingTarget, default=None)
4242

4343
def __init__(self, *args, **kwargs):
44-
if 'vm_count' in kwargs:
45-
deprecate("0.9.0", "vm_count is deprecated for ClusterConfiguration.", "Please use size instead.")
46-
kwargs['size'] = kwargs.pop('vm_count')
47-
48-
if 'vm_low_pri_count' in kwargs:
49-
deprecate("vm_low_pri_count is deprecated for ClusterConfiguration.",
50-
"Please use size_low_priority instead.")
51-
kwargs['size_low_priority'] = kwargs.pop('vm_low_pri_count')
52-
5344
super().__init__(*args, **kwargs)
5445

55-
@property
56-
@deprecated("0.9.0")
57-
def vm_count(self):
58-
return self.size
59-
60-
@vm_count.setter
61-
@deprecated("0.9.0")
62-
def vm_count(self, value):
63-
self.size = value
64-
65-
@property
66-
@deprecated("0.9.0")
67-
def vm_low_pri_count(self):
68-
return self.size_low_priority
69-
70-
@vm_low_pri_count.setter
71-
@deprecated("0.9.0")
72-
def vm_low_pri_count(self, value):
73-
self.size_low_priority = value
74-
7546
def mixed_mode(self) -> bool:
7647
"""
7748
Return:
@@ -103,9 +74,5 @@ def __validate__(self) -> bool:
10374
"You must configure a VNET to use AZTK in mixed mode (dedicated and low priority nodes). Set the VNET's subnet_id in your cluster.yaml or with a parameter (--subnet-id)."
10475
)
10576

106-
if self.custom_scripts:
107-
deprecate("0.9.0", "Custom scripts are DEPRECATED.",
108-
"Use plugins instead. See https://aztk.readthedocs.io/en/v0.7.0/15-plugins.html.")
109-
11077
if self.scheduling_target == SchedulingTarget.Dedicated and self.size == 0:
11178
raise error.InvalidModelError("Scheduling target cannot be Dedicated if dedicated vm size is 0")

aztk/models/toolkit.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
from aztk.core.models import Model, fields
44
from aztk.error import InvalidModelError
5-
from aztk.utils import constants, deprecate
5+
from aztk.utils import constants
66

77

88
class ToolkitDefinition:
@@ -60,8 +60,6 @@ def __validate__(self):
6060
if self.version not in toolkit_def.versions:
6161
raise InvalidModelError("Toolkit '{0}' with version '{1}' is not available. Use one of: {2}".format(
6262
self.software, self.version, toolkit_def.versions))
63-
if self.version == "1.6":
64-
deprecate("0.9.0", "Spark version 1.6 is being deprecated for Aztk.", "Please use 2.1 and above.")
6563

6664
if self.environment:
6765
if self.environment not in toolkit_def.environments:

aztk/spark/helpers/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
# ALL FILES IN THIS DIRECTORY ARE DEPRECATED, WILL BE REMOVED IN v0.9.0
1+
# ALL FILES IN THIS DIRECTORY ARE DEPRECATED, WILL BE REMOVED IN v0.10.0

aztk_cli/config.py

Lines changed: 3 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,11 @@
11
import os
2+
23
import yaml
4+
35
import aztk.spark
4-
from aztk.spark.models import (
5-
SecretsConfiguration,
6-
ClusterConfiguration,
7-
SchedulingTarget,
8-
)
9-
from aztk.utils import deprecate
106
from aztk.models import Toolkit
117
from aztk.models.plugins.internal import PluginReference
8+
from aztk.spark.models import (ClusterConfiguration, SchedulingTarget, SecretsConfiguration)
129

1310

1411
def load_aztk_secrets() -> SecretsConfiguration:
@@ -46,11 +43,6 @@ def _load_config_file(path: str):
4643

4744

4845
def _merge_secrets_dict(secrets: SecretsConfiguration, secrets_config):
49-
if 'default' in secrets_config:
50-
deprecate("0.9.0", "default key in secrets.yaml is deprecated.",
51-
"Place all child parameters directly at the root")
52-
secrets_config = dict(**secrets_config, **secrets_config.pop('default'))
53-
5446
other = SecretsConfiguration.from_dict(secrets_config)
5547
secrets.merge(other)
5648

aztk_cli/spark/endpoints/cluster/cluster_create.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33

44
import aztk.spark
55
from aztk.spark.models import ClusterConfiguration, UserConfiguration
6-
from aztk.utils import deprecate
76
from aztk_cli import config, log, utils
87
from aztk_cli.config import load_aztk_spark_config
98

@@ -39,9 +38,6 @@ def execute(args: typing.NamedTuple):
3938
# read cluster.yaml configuration file, overwrite values with args
4039
file_config, wait = config.read_cluster_config()
4140
cluster_conf.merge(file_config)
42-
if args.size_low_pri is not None:
43-
deprecate("0.9.0", "--size-low-pri has been deprecated.", "Please use --size-low-priority.")
44-
args.size_low_priority = args.size_low_pri
4541

4642
cluster_conf.merge(
4743
ClusterConfiguration(

tests/core/test_models.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,8 @@ class SimpleStateModel(Model):
7777

7878
with pytest.raises(
7979
InvalidModelFieldError,
80-
match="SimpleStateModel state unknown is not a valid option. Use one of \\['creating', 'ready', 'deleting'\\]"):
80+
match=
81+
"SimpleStateModel state unknown is not a valid option. Use one of \\['creating', 'ready', 'deleting'\\]"):
8182

8283
obj = SimpleStateModel(state="unknown")
8384
obj.validate()
@@ -234,15 +235,15 @@ class UserList(Model):
234235
obj = UserList()
235236
obj.validate()
236237

237-
assert isinstance(obj.infos, (list, ))
238+
assert isinstance(obj.infos, (list,))
238239
assert len(obj.infos) == 0
239240

240241
infos = obj.infos
241242
infos.append(UserInfo())
242243
assert len(obj.infos) == 1
243244

244245
obj2 = UserList(infos=None)
245-
assert isinstance(obj2.infos, (list, ))
246+
assert isinstance(obj2.infos, (list,))
246247
assert len(obj2.infos) == 0
247248

248249

@@ -253,7 +254,7 @@ class UserList(Model):
253254
obj = UserList(infos=[None, None])
254255
obj.validate()
255256

256-
assert isinstance(obj.infos, (list, ))
257+
assert isinstance(obj.infos, (list,))
257258
assert len(obj.infos) == 0
258259

259260

tests/integration_tests/spark/sdk/cluster/test_cluster.py

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
from aztk.error import AztkError
1313
from aztk.utils import constants
1414
from aztk_cli import config
15-
from tests.integration_tests.spark.sdk.get_client import get_spark_client, get_test_suffix
15+
from tests.integration_tests.spark.sdk.get_client import (get_spark_client, get_test_suffix)
1616

1717
base_cluster_id = get_test_suffix("cluster")
1818
spark_client = get_spark_client()
@@ -70,8 +70,8 @@ def test_create_cluster():
7070
# TODO: make Cluster Configuration more robust, test each value
7171
cluster_configuration = aztk.spark.models.ClusterConfiguration(
7272
cluster_id=test_id + base_cluster_id,
73-
vm_count=2,
74-
vm_low_pri_count=0,
73+
size=2,
74+
size_low_priority=0,
7575
vm_size="standard_f2",
7676
subnet_id=None,
7777
custom_scripts=None,
@@ -101,8 +101,8 @@ def test_list_clusters():
101101
test_id = "test-list-"
102102
cluster_configuration = aztk.spark.models.ClusterConfiguration(
103103
cluster_id=test_id + base_cluster_id,
104-
vm_count=2,
105-
vm_low_pri_count=0,
104+
size=2,
105+
size_low_priority=0,
106106
vm_size="standard_f2",
107107
subnet_id=None,
108108
custom_scripts=None,
@@ -126,8 +126,8 @@ def test_get_remote_login_settings():
126126
test_id = "test-get-remote-login-"
127127
cluster_configuration = aztk.spark.models.ClusterConfiguration(
128128
cluster_id=test_id + base_cluster_id,
129-
vm_count=2,
130-
vm_low_pri_count=0,
129+
size=2,
130+
size_low_priority=0,
131131
vm_size="standard_f2",
132132
subnet_id=None,
133133
custom_scripts=None,
@@ -154,8 +154,8 @@ def test_submit():
154154
test_id = "test-submit-"
155155
cluster_configuration = aztk.spark.models.ClusterConfiguration(
156156
cluster_id=test_id + base_cluster_id,
157-
vm_count=2,
158-
vm_low_pri_count=0,
157+
size=2,
158+
size_low_priority=0,
159159
vm_size="standard_f2",
160160
subnet_id=None,
161161
custom_scripts=None,
@@ -195,8 +195,8 @@ def test_get_application_log():
195195
test_id = "test-get-app-log-"
196196
cluster_configuration = aztk.spark.models.ClusterConfiguration(
197197
cluster_id=test_id + base_cluster_id,
198-
vm_count=2,
199-
vm_low_pri_count=0,
198+
size=2,
199+
size_low_priority=0,
200200
vm_size="standard_f2",
201201
subnet_id=None,
202202
custom_scripts=None,
@@ -256,8 +256,8 @@ def test_get_application_status_complete():
256256
test_id = "test-app-status-complete-"
257257
cluster_configuration = aztk.spark.models.ClusterConfiguration(
258258
cluster_id=test_id + base_cluster_id,
259-
vm_count=2,
260-
vm_low_pri_count=0,
259+
size=2,
260+
size_low_priority=0,
261261
vm_size="standard_f2",
262262
subnet_id=None,
263263
custom_scripts=None,
@@ -300,8 +300,8 @@ def test_delete_cluster():
300300
test_id = "test-delete-"
301301
cluster_configuration = aztk.spark.models.ClusterConfiguration(
302302
cluster_id=test_id + base_cluster_id,
303-
vm_count=2,
304-
vm_low_pri_count=0,
303+
size=2,
304+
size_low_priority=0,
305305
vm_size="standard_f2",
306306
subnet_id=None,
307307
custom_scripts=None,
@@ -326,8 +326,8 @@ def test_spark_processes_up():
326326
test_id = "test-spark-processes-up-"
327327
cluster_configuration = aztk.spark.models.ClusterConfiguration(
328328
cluster_id=test_id + base_cluster_id,
329-
vm_count=2,
330-
vm_low_pri_count=0,
329+
size=2,
330+
size_low_priority=0,
331331
vm_size="standard_f2",
332332
subnet_id=None,
333333
custom_scripts=None,

tests/integration_tests/spark/sdk/cluster/test_cluster_deprecated.py

Lines changed: 20 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,7 @@
1212
from aztk.error import AztkError
1313
from aztk.utils import constants
1414
from aztk_cli import config
15-
from tests.integration_tests.spark.sdk.get_client import get_spark_client, get_test_suffix
16-
15+
from tests.integration_tests.spark.sdk.get_client import (get_spark_client, get_test_suffix)
1716

1817
base_cluster_id = get_test_suffix("cluster")
1918
spark_client = get_spark_client()
@@ -71,8 +70,8 @@ def test_create_cluster():
7170
# TODO: make Cluster Configuration more robust, test each value
7271
cluster_configuration = aztk.spark.models.ClusterConfiguration(
7372
cluster_id=test_id + base_cluster_id,
74-
vm_count=2,
75-
vm_low_pri_count=0,
73+
size=2,
74+
size_low_priority=0,
7675
vm_size="standard_f2",
7776
subnet_id=None,
7877
custom_scripts=None,
@@ -103,8 +102,8 @@ def test_get_cluster():
103102
test_id = "test-get-"
104103
cluster_configuration = aztk.spark.models.ClusterConfiguration(
105104
cluster_id=test_id + base_cluster_id,
106-
vm_count=2,
107-
vm_low_pri_count=0,
105+
size=2,
106+
size_low_priority=0,
108107
vm_size="standard_f2",
109108
subnet_id=None,
110109
custom_scripts=None,
@@ -137,8 +136,8 @@ def test_list_clusters():
137136
test_id = "test-list-"
138137
cluster_configuration = aztk.spark.models.ClusterConfiguration(
139138
cluster_id=test_id + base_cluster_id,
140-
vm_count=2,
141-
vm_low_pri_count=0,
139+
size=2,
140+
size_low_priority=0,
142141
vm_size="standard_f2",
143142
subnet_id=None,
144143
custom_scripts=None,
@@ -164,8 +163,8 @@ def test_get_remote_login_settings():
164163
test_id = "test-get-remote-login-"
165164
cluster_configuration = aztk.spark.models.ClusterConfiguration(
166165
cluster_id=test_id + base_cluster_id,
167-
vm_count=2,
168-
vm_low_pri_count=0,
166+
size=2,
167+
size_low_priority=0,
169168
vm_size="standard_f2",
170169
subnet_id=None,
171170
custom_scripts=None,
@@ -195,8 +194,8 @@ def test_submit():
195194
test_id = "test-submit-"
196195
cluster_configuration = aztk.spark.models.ClusterConfiguration(
197196
cluster_id=test_id + base_cluster_id,
198-
vm_count=2,
199-
vm_low_pri_count=0,
197+
size=2,
198+
size_low_priority=0,
200199
vm_size="standard_f2",
201200
subnet_id=None,
202201
custom_scripts=None,
@@ -225,7 +224,7 @@ def test_submit():
225224
with pytest.warns(DeprecationWarning):
226225
spark_client.submit(
227226
cluster_id=cluster_configuration.cluster_id, application=application_configuration, wait=True)
228-
227+
229228
assert True
230229

231230
except (AztkError, BatchErrorException):
@@ -239,8 +238,8 @@ def test_get_application_log():
239238
test_id = "test-get-app-log-"
240239
cluster_configuration = aztk.spark.models.ClusterConfiguration(
241240
cluster_id=test_id + base_cluster_id,
242-
vm_count=2,
243-
vm_low_pri_count=0,
241+
size=2,
242+
size_low_priority=0,
244243
vm_size="standard_f2",
245244
subnet_id=None,
246245
custom_scripts=None,
@@ -303,8 +302,8 @@ def test_get_application_status_complete():
303302
test_id = "test-app-status-complete-"
304303
cluster_configuration = aztk.spark.models.ClusterConfiguration(
305304
cluster_id=test_id + base_cluster_id,
306-
vm_count=2,
307-
vm_low_pri_count=0,
305+
size=2,
306+
size_low_priority=0,
308307
vm_size="standard_f2",
309308
subnet_id=None,
310309
custom_scripts=None,
@@ -349,8 +348,8 @@ def test_delete_cluster():
349348
test_id = "test-delete-"
350349
cluster_configuration = aztk.spark.models.ClusterConfiguration(
351350
cluster_id=test_id + base_cluster_id,
352-
vm_count=2,
353-
vm_low_pri_count=0,
351+
size=2,
352+
size_low_priority=0,
354353
vm_size="standard_f2",
355354
subnet_id=None,
356355
custom_scripts=None,
@@ -376,8 +375,8 @@ def test_spark_processes_up():
376375
test_id = "test-spark-processes-up-"
377376
cluster_configuration = aztk.spark.models.ClusterConfiguration(
378377
cluster_id=test_id + base_cluster_id,
379-
vm_count=2,
380-
vm_low_pri_count=0,
378+
size=2,
379+
size_low_priority=0,
381380
vm_size="standard_f2",
382381
subnet_id=None,
383382
custom_scripts=None,

0 commit comments

Comments
 (0)