
Catchup YAPF formatting

Change-Id: Ic54f77b4b0bdd9199fbc10dfdfc43d3af8f0bfd1
Scott Hussey committed 6 months ago · commit 6ca7aa4bff

docs/source/_static/drydock.conf.sample (+3, -3)

@@ -404,12 +404,12 @@
 # Timeout in minutes for deploying a node (integer value)
 #deploy_node = 45
 
-# Timeout in minutes for relabeling a node (integer value)
-#relabel_node = 5
-
 # Timeout in minutes between deployment completion and the all boot actions
 # reporting status (integer value)
 #bootaction_final_status = 15
 
 # Timeout in minutes for releasing a node (integer value)
 #destroy_node = 30
+
+# Timeout in minutes for relabeling a node (integer value)
+#relabel_node = 5
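
Note: this hunk only reorders the regenerated sample; the relabel_node option lives in the [timeouts] group. A minimal oslo.config sketch of how such an option is declared and read (option names mirror the sample, defaults are the sample's commented values; illustrative only, not Drydock's actual registration code):

    from oslo_config import cfg

    # Illustrative only: mirrors the sample's [timeouts] options.
    opts = [
        cfg.IntOpt('relabel_node', default=5,
                   help='Timeout in minutes for relabeling a node'),
        cfg.IntOpt('destroy_node', default=30,
                   help='Timeout in minutes for releasing a node'),
    ]

    conf = cfg.ConfigOpts()
    conf.register_opts(opts, group='timeouts')
    conf([])  # parse defaults; pass ['--config-file', ...] for a real file

    print(conf.timeouts.relabel_node)  # -> 5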

etc/drydock/drydock.conf.sample (+9, -0)

@@ -276,6 +276,9 @@
 # Logger name for Node driver logging (string value)
 #nodedriver_logger_name = ${global_logger_name}.nodedriver
 
+# Logger name for Kubernetes driver logging (string value)
+#kubernetesdriver_logger_name = ${global_logger_name}.kubernetesdriver
+
 # Logger name for API server logging (string value)
 #control_logger_name = ${global_logger_name}.control
 
@@ -350,6 +353,9 @@
 # Module path string of the Node driver to enable (string value)
 #node_driver = drydock_provisioner.drivers.node.maasdriver.driver.MaasNodeDriver
 
+# Module path string of the Kubernetes driver to enable (string value)
+#kubernetes_driver = drydock_provisioner.drivers.kubernetes.promenade_driver.driver.PromenadeDriver
+
 # Module path string of the Network driver enable (string value)
 #network_driver = <None>
 
@@ -404,3 +410,6 @@
 
 # Timeout in minutes for releasing a node (integer value)
 #destroy_node = 30
+
+# Timeout in minutes for relabeling a node (integer value)
+#relabel_node = 5
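
Note: the kubernetes_driver default above is a dotted module path ending in a class name. A hedged sketch of how such a path can be split and resolved (this mirrors the importlib pattern visible in the orchestrator.py hunk further down; assumes drydock_provisioner is importable):

    import importlib

    driver_path = ('drydock_provisioner.drivers.kubernetes.'
                   'promenade_driver.driver.PromenadeDriver')
    module_name, _, class_name = driver_path.rpartition('.')
    # Resolve the module, then the class; None if the class is absent.
    driver_class = getattr(importlib.import_module(module_name), class_name,
                           None)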

etc/drydock/policy.yaml.sample (+4, -0)

@@ -38,6 +38,10 @@
 # POST  /api/v1.0/tasks
 #"physical_provisioner:destroy_nodes": "role:admin"
 
+# Create relabel_nodes task
+# POST  /api/v1.0/tasks
+#"physical_provisioner:relabel_nodes": "role:admin"
+
 # Read build data for a node
 # GET  /api/v1.0/nodes/{nodename}/builddata
 #"physical_provisioner:read_build_data": "role:admin"

python/drydock_provisioner/cli/part/commands.py (+2, -2)

@@ -64,8 +64,8 @@ def part_list(ctx):
     """List parts of a design."""
     click.echo(
         json.dumps(
-            PartList(ctx.obj['CLIENT'], design_id=ctx.obj['DESIGN_ID'])
-            .invoke()))
+            PartList(ctx.obj['CLIENT'],
+                     design_id=ctx.obj['DESIGN_ID']).invoke()))
 
 
 @part.command(name='show')

python/drydock_provisioner/cli/task/commands.py (+2, -2)

@@ -79,8 +79,8 @@ def task_create(ctx,
                 if node_names else [],
                 rack_names=[x.strip() for x in rack_names.split(',')]
                 if rack_names else [],
-                node_tags=[x.strip() for x in node_tags.split(',')]
-                if node_tags else [],
+                node_tags=[x.strip()
+                           for x in node_tags.split(',')] if node_tags else [],
                 block=block,
                 poll_interval=poll_interval).invoke()))
 

python/drydock_provisioner/drivers/kubernetes/promenade_driver/actions/k8s_node.py (+2, -1)

@@ -58,7 +58,8 @@ class RelabelNode(PromenadeAction):
         for n in nodes:
             # Relabel node through Promenade
             try:
-                self.logger.info("Relabeling node %s with node label data." % n.name)
+                self.logger.info(
+                    "Relabeling node %s with node label data." % n.name)
 
                 labels_dict = n.get_node_labels()
                 msg = "Set labels %s for node %s" % (str(labels_dict), n.name)

python/drydock_provisioner/drivers/kubernetes/promenade_driver/driver.py (+4, -6)

@@ -35,8 +35,7 @@ class PromenadeDriver(KubernetesDriver):
     driver_desc = 'Promenade Kubernetes Driver'
 
     action_class_map = {
-        hd_fields.OrchestratorAction.RelabelNode:
-        RelabelNode,
+        hd_fields.OrchestratorAction.RelabelNode: RelabelNode,
     }
 
     def __init__(self, **kwargs):
@@ -103,8 +102,7 @@ class PromenadeDriver(KubernetesDriver):
                         action.start)
 
                 timeout = action_timeouts.get(
-                    task.action,
-                    config.config_mgr.conf.timeouts.relabel_node)
+                    task.action, config.config_mgr.conf.timeouts.relabel_node)
                 finished, running = concurrent.futures.wait(
                     subtask_futures.values(), timeout=(timeout * 60))
 
@@ -118,8 +116,8 @@ class PromenadeDriver(KubernetesDriver):
                     task.failure()
                 else:
                     if f.exception():
-                        msg = ("Subtask %s raised unexpected exception: %s"
-                               % (str(uuid.UUID(bytes=t)), str(f.exception())))
+                        msg = ("Subtask %s raised unexpected exception: %s" %
+                               (str(uuid.UUID(bytes=t)), str(f.exception())))
                         self.logger.error(msg, exc_info=f.exception())
                         task.add_status_msg(
                             msg=msg,
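
Note: the timeout reflowed above is minutes-based and multiplied by 60 when handed to concurrent.futures.wait. A self-contained sketch of the same pattern (names are illustrative; the real driver pulls the value from config.config_mgr):

    import concurrent.futures

    def wait_on_subtasks(fns, timeout_minutes=5):
        """Run callables in a pool and wait at most timeout_minutes."""
        with concurrent.futures.ThreadPoolExecutor(max_workers=4) as pool:
            futures = {i: pool.submit(fn) for i, fn in enumerate(fns)}
            finished, running = concurrent.futures.wait(
                futures.values(), timeout=(timeout_minutes * 60))
            return finished, running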

python/drydock_provisioner/drivers/kubernetes/promenade_driver/promenade_client.py (+9, -13)

@@ -22,6 +22,7 @@ from keystoneauth1 import exceptions as exc
 import drydock_provisioner.error as errors
 from drydock_provisioner.util import KeystoneUtils
 
+
 # TODO: Remove this local implementation of Promenade Session and client once
 # Promenade api client is available as part of Promenade project.
 class PromenadeSession(object):
@@ -35,10 +36,7 @@ class PromenadeSession(object):
         read timeout to use
     """
 
-    def __init__(self,
-                 scheme='http',
-                 marker=None,
-                 timeout=None):
+    def __init__(self, scheme='http', marker=None, timeout=None):
         self.logger = logging.getLogger(__name__)
         self.__session = requests.Session()
 
@@ -63,8 +61,8 @@ class PromenadeSession(object):
 
     def set_auth(self):
 
-            auth_header = self._auth_gen()
-            self.__session.headers.update(auth_header)
+        auth_header = self._auth_gen()
+        self.__session.headers.update(auth_header)
 
     def get(self, route, query=None, timeout=None):
         """
@@ -220,11 +218,10 @@ class PromenadeSession(object):
         try:
             ks_session = KeystoneUtils.get_session()
         except exc.AuthorizationFailure as aferr:
-            self.logger.error(
-                'Could not authorize against Keystone: %s',
-                str(aferr))
-            raise errors.DriverError('Could not authorize against Keystone: %s',
-                                     str(aferr))
+            self.logger.error('Could not authorize against Keystone: %s',
+                              str(aferr))
+            raise errors.DriverError(
+                'Could not authorize against Keystone: %s', str(aferr))
 
         return ks_session
 
@@ -235,8 +232,7 @@ class PromenadeSession(object):
 
         try:
             prom_endpoint = ks_session.get_endpoint(
-                interface='internal',
-                service_type='kubernetesprovisioner')
+                interface='internal', service_type='kubernetesprovisioner')
         except exc.EndpointNotFound:
             self.logger.error("Could not find an internal interface"
                               " defined in Keystone for Promenade")

python/drydock_provisioner/drivers/node/maasdriver/actions/node.py (+31, -24)

@@ -278,7 +278,8 @@ class DestroyNode(BaseMaasAction):
                                                       site_design)
         for n in nodes:
             try:
-                machine = machine_list.identify_baremetal_node(n, update_name=False)
+                machine = machine_list.identify_baremetal_node(
+                    n, update_name=False)
 
                 if machine is None:
                     msg = "Could not locate machine for node {}".format(n.name)
@@ -297,7 +298,8 @@ class DestroyNode(BaseMaasAction):
                     try:
                         machine.release(erase_disk=True, quick_erase=True)
                     except errors.DriverError:
-                        msg = "Error Releasing node {}, skipping".format(n.name)
+                        msg = "Error Releasing node {}, skipping".format(
+                            n.name)
                         self.logger.warning(msg)
                         self.task.add_status_msg(
                             msg=msg, error=True, ctx=n.name, ctx_type='node')
@@ -306,25 +308,26 @@ class DestroyNode(BaseMaasAction):
 
                     # node release with erase disk will take sometime monitor it
                     attempts = 0
-                    max_attempts = (config.config_mgr.conf.timeouts.destroy_node
-                                    * 60) // config.config_mgr.conf.maasdriver.poll_interval
+                    max_attempts = (
+                        config.config_mgr.conf.timeouts.destroy_node *
+                        60) // config.config_mgr.conf.maasdriver.poll_interval
 
-                    while (attempts < max_attempts
-                           and (not machine.status_name.startswith('Ready')
-                                and not machine.status_name.startswith(
-                                        'Failed'))):
+                    while (attempts < max_attempts and
+                           (not machine.status_name.startswith('Ready')
+                            and not machine.status_name.startswith('Failed'))):
                         attempts = attempts + 1
                         time.sleep(
                             config.config_mgr.conf.maasdriver.poll_interval)
                         try:
                             machine.refresh()
                             self.logger.debug(
-                                "Polling node {} status attempt {:d} of {:d}: {}".format(
-                                    n.name, attempts, max_attempts,
-                                    machine.status_name))
+                                "Polling node {} status attempt {:d} of {:d}: {}"
+                                .format(n.name, attempts, max_attempts,
+                                        machine.status_name))
                         except Exception:
                             self.logger.warning(
-                                "Error updating node {} status during release node, will re-attempt.".format(n.name))
+                                "Error updating node {} status during release node, will re-attempt."
+                                .format(n.name))
                     if machine.status_name.startswith('Ready'):
                         msg = "Node {} released and disk erased.".format(
                             n.name)
@@ -354,8 +357,8 @@
                 try:
                     if n.oob_type == 'libvirt':
                         self.logger.info(
-                            'Resetting MaaS virsh power parameters for node {}.'.format(
-                                n.name))
+                            'Resetting MaaS virsh power parameters for node {}.'
+                            .format(n.name))
                         # setting power type attibutes to empty string
                         # will remove them from maas BMC table
                         machine.reset_power_parameters()
@@ -363,8 +366,8 @@
                     pass
 
                 machine.delete()
-                msg = "Deleted Node: {} in status: {}.".format(n.name,
-                                                               machine.status_name)
+                msg = "Deleted Node: {} in status: {}.".format(
+                    n.name, machine.status_name)
                 self.logger.info(msg)
                 self.task.add_status_msg(
                     msg=msg, error=False, ctx=n.name, ctx_type='node')
@@ -1147,16 +1150,17 @@ class ConfigureHardware(BaseMaasAction):
 
                         # Poll machine status
                         attempts = 0
-                        max_attempts = (config.config_mgr.conf.timeouts.configure_hardware
-                                        * 60) // config.config_mgr.conf.maasdriver.poll_interval
+                        max_attempts = (
+                            config.config_mgr.conf.timeouts.configure_hardware
+                            * 60
+                        ) // config.config_mgr.conf.maasdriver.poll_interval
 
                         while (attempts < max_attempts and
                                (machine.status_name != 'Ready' and
                                 not machine.status_name.startswith('Failed'))):
                             attempts = attempts + 1
-                            time.sleep(
-                                config.config_mgr.conf.maasdriver.poll_interval
-                            )
+                            time.sleep(config.config_mgr.conf.maasdriver.
+                                       poll_interval)
                             try:
                                 machine.refresh()
                                 self.logger.debug(
@@ -1226,7 +1230,9 @@
             except Exception as ex:
                 msg = "Error commissioning node %s: %s" % (n.name, str(ex))
                 self.logger.warning(msg)
-                self.logger.debug("Unhandled exception attempting to commission node.", exc_info=ex)
+                self.logger.debug(
+                    "Unhandled exception attempting to commission node.",
+                    exc_info=ex)
                 self.task.add_status_msg(
                     msg=msg, error=True, ctx=n.name, ctx_type='node')
                 self.task.failure(focus=n.get_id())
@@ -2312,8 +2318,9 @@ class DeployNode(BaseMaasAction):
                 continue
 
             attempts = 0
-            max_attempts = (config.config_mgr.conf.timeouts.deploy_node
-                            * 60) // config.config_mgr.conf.maasdriver.poll_interval
+            max_attempts = (
+                config.config_mgr.conf.timeouts.deploy_node *
+                60) // config.config_mgr.conf.maasdriver.poll_interval
 
             while (attempts < max_attempts
                    and (not machine.status_name.startswith('Deployed')
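
Note: the max_attempts expressions reformatted above all follow one pattern: a minutes-based timeout times 60, floor-divided by the poll interval in seconds, bounding the status-polling loop. Generic sketch (check_ready stands in for machine.refresh() plus the status checks):

    import time

    def poll_until_ready(check_ready, timeout_minutes, poll_interval):
        """Poll check_ready() until it is True or the timeout elapses."""
        max_attempts = (timeout_minutes * 60) // poll_interval
        attempts = 0
        while attempts < max_attempts and not check_ready():
            attempts += 1
            time.sleep(poll_interval)
        return check_ready()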

python/drydock_provisioner/drivers/node/maasdriver/models/machine.py (+2, -2)

@@ -423,8 +423,8 @@ class Machine(model_base.ResourceBase):
             return True
 
         raise errors.DriverError(
-            "Failed updating power parameters MAAS url {} - return code {}\n{}".format(
-                url, resp.status_code.resp.text))
+            "Failed updating power parameters MAAS url {} - return code {}\n{}"
+            .format(url, resp.status_code.resp.text))
 
     def to_dict(self):
         """Serialize this resource instance into a dict.

python/drydock_provisioner/objects/bootaction.py (+2, -1)

@@ -87,7 +87,8 @@ class BootAction(base.DrydockPersistentObject, base.DrydockObject):
         for a in self.asset_list:
             if type_filter is None or (type_filter is not None
                                        and a.type == type_filter):
-                a.render(nodename, site_design, action_id, action_key, design_ref)
+                a.render(nodename, site_design, action_id, action_key,
+                         design_ref)
                 assets.append(a)
 
         return assets

python/drydock_provisioner/objects/fields.py (+6, -6)

@@ -70,12 +70,12 @@ class OrchestratorAction(BaseDrydockEnum):
 
     ALL = (Noop, ValidateDesign, VerifySite, PrepareSite, VerifyNodes,
            PrepareNodes, DeployNodes, BootactionReport, DestroyNodes,
-           RelabelNodes, ConfigNodePxe, SetNodeBoot, PowerOffNode,
-           PowerOnNode, PowerCycleNode, InterrogateOob, RelabelNode,
-           CreateNetworkTemplate, CreateStorageTemplate, CreateBootMedia,
-           PrepareHardwareConfig, ConfigureHardware, InterrogateNode,
-           ApplyNodeNetworking, ApplyNodeStorage, ApplyNodePlatform,
-           DeployNode, DestroyNode, ConfigureNodeProvisioner)
+           RelabelNodes, ConfigNodePxe, SetNodeBoot, PowerOffNode, PowerOnNode,
+           PowerCycleNode, InterrogateOob, RelabelNode, CreateNetworkTemplate,
+           CreateStorageTemplate, CreateBootMedia, PrepareHardwareConfig,
+           ConfigureHardware, InterrogateNode, ApplyNodeNetworking,
+           ApplyNodeStorage, ApplyNodePlatform, DeployNode, DestroyNode,
+           ConfigureNodeProvisioner)
 
 
 class OrchestratorActionField(fields.BaseEnumField):

python/drydock_provisioner/objects/node.py (+1, -0)

@@ -338,6 +338,7 @@ class BaremetalNode(drydock_provisioner.objects.hostprofile.HostProfile):
 
         return labels_dict
 
+
 @base.DrydockObjectRegistry.register
 class BaremetalNodeList(base.DrydockObjectListBase, base.DrydockObject):
 

python/drydock_provisioner/objects/task.py (+3, -3)

@@ -274,7 +274,8 @@ class Task(object):
                         "Bubbling subtask success for entity %s." % se)
                     self.result.add_success(se)
             else:
-                self.logger.debug("Skipping subtask success due to action filter.")
+                self.logger.debug(
+                    "Skipping subtask success due to action filter.")
             # All failures are bubbled up.
             if self.retry == 0 or (self.retry == st.retry):
                 for fe in st.result.failures:
@@ -283,8 +284,7 @@ class Task(object):
                     self.result.add_failure(fe)
             else:
                 self.logger.debug(
-                    "Skipping failures as they mismatch task retry sequence."
-                )
+                    "Skipping failures as they mismatch task retry sequence.")
 
     def align_result(self, action_filter=None, reset_status=True):
         """Align the result of this task with the combined results of all the subtasks.

python/drydock_provisioner/orchestrator/actions/orchestrator.py (+4, -6)

@@ -245,9 +245,8 @@ class DestroyNodes(BaseAction):
                     node_filter=self.task.node_filter)
                 self.task.register_subtask(node_release_task)
 
-            self.logger.info(
-                "Starting node driver task %s to Release nodes." %
-                (node_release_task.get_id()))
+            self.logger.info("Starting node driver task %s to Release nodes." %
+                             (node_release_task.get_id()))
             node_driver.execute_task(node_release_task.get_id())
 
             node_release_task = self.state_manager.get_task(
@@ -1079,9 +1078,8 @@ class RelabelNodes(BaseAction):
             node_filter=nf)
         self.task.register_subtask(relabel_node_task)
 
-        self.logger.info(
-            "Starting kubernetes driver task %s to relabel nodes." %
-            (relabel_node_task.get_id()))
+        self.logger.info("Starting kubernetes driver task %s to relabel nodes."
+                         % (relabel_node_task.get_id()))
         kubernetes_driver.execute_task(relabel_node_task.get_id())
 
         relabel_node_task = self.state_manager.get_task(

python/drydock_provisioner/orchestrator/orchestrator.py (+3, -2)

@@ -109,8 +109,9 @@ class Orchestrator(object):
                 kubernetes_driver_class = getattr(
                     importlib.import_module(m), c, None)
                 if kubernetes_driver_class is not None:
-                    self.enabled_drivers['kubernetes'] = kubernetes_driver_class(
-                        state_manager=state_manager, orchestrator=self)
+                    self.enabled_drivers[
+                        'kubernetes'] = kubernetes_driver_class(
+                            state_manager=state_manager, orchestrator=self)
 
     def watch_for_tasks(self):
         """Start polling the database watching for Queued tasks to execute."""

python/drydock_provisioner/orchestrator/validations/network_trunking_rational.py (+4, -6)

@@ -40,17 +40,15 @@ class NetworkTrunkingRational(Validators):
                 )
 
             # trunking mode is disabled, default_network must be defined
-            if (network_link.trunk_mode ==
-                    hd_fields.NetworkLinkTrunkingMode.Disabled
-                    and network_link.native_network is None):
+            if (network_link.trunk_mode == hd_fields.NetworkLinkTrunkingMode.
+                    Disabled and network_link.native_network is None):
 
                 msg = 'Trunking mode is disabled, a trunking default_network must be defined'
                 self.report_error(
                     msg, [network_link.doc_ref],
                     "Non-trunked links must have a native network defined.")
-            elif (network_link.trunk_mode ==
-                  hd_fields.NetworkLinkTrunkingMode.Disabled
-                  and network_link.native_network is not None):
+            elif (network_link.trunk_mode == hd_fields.NetworkLinkTrunkingMode.
+                  Disabled and network_link.native_network is not None):
                 network = site_design.get_network(network_link.native_network)
                 if network and network.vlan_id:
                     msg = "Network link native network has a defined VLAN tag."

python/drydock_provisioner/policy.py (+14, -13)

@@ -38,15 +38,15 @@ class DrydockPolicy(object):
 
     # Orchestrator Policy
    task_rules = [
-        policy.DocumentedRuleDefault('physical_provisioner:read_task',
-                                     'role:admin', 'Get task status',
-                                     [{
-                                         'path': '/api/v1.0/tasks',
-                                         'method': 'GET'
-                                     }, {
-                                         'path': '/api/v1.0/tasks/{task_id}',
-                                         'method': 'GET'
-                                     }]),
+        policy.DocumentedRuleDefault(
+            'physical_provisioner:read_task', 'role:admin', 'Get task status',
+            [{
+                'path': '/api/v1.0/tasks',
+                'method': 'GET'
+            }, {
+                'path': '/api/v1.0/tasks/{task_id}',
+                'method': 'GET'
+            }]),
         policy.DocumentedRuleDefault('physical_provisioner:create_task',
                                      'role:admin', 'Create a task',
                                      [{
@@ -103,10 +103,11 @@ class DrydockPolicy(object):
                                      }]),
         policy.DocumentedRuleDefault(
            'physical_provisioner:read_build_data', 'role:admin',
-            'Read build data for a node',
-            [{
-                'path': '/api/v1.0/nodes/{nodename}/builddata',
-                'method': 'GET',
+            'Read build data for a node', [{
+                'path':
+                '/api/v1.0/nodes/{nodename}/builddata',
+                'method':
+                'GET',
             }]),
     ]
 

python/drydock_provisioner/statemgmt/state.py (+4, -5)

@@ -169,9 +169,8 @@ class DrydockState(object):
             with self.db_engine.connect() as conn:
                 if allowed_actions is None:
                     query = self.tasks_tbl.select().where(
-                        self.tasks_tbl.c.status ==
-                        hd_fields.TaskStatus.Queued).order_by(
-                            self.tasks_tbl.c.created.asc())
+                        self.tasks_tbl.c.status == hd_fields.TaskStatus.
+                        Queued).order_by(self.tasks_tbl.c.created.asc())
                     rs = conn.execute(query)
                 else:
                     query = sql.text("SELECT * FROM tasks WHERE "
@@ -340,8 +339,8 @@ class DrydockState(object):
         try:
             with self.db_engine.connect() as conn:
                 query = self.active_instance_tbl.update().where(
-                    self.active_instance_tbl.c.identity ==
-                    leader_id.bytes).values(last_ping=datetime.utcnow())
+                    self.active_instance_tbl.c.identity == leader_id.
+                    bytes).values(last_ping=datetime.utcnow())
                 rs = conn.execute(query)
                 rc = rs.rowcount
 
 

python/tests/integration/postgres/test_api_bootaction.py (+6, -3)

@@ -26,7 +26,8 @@ from drydock_provisioner.control.api import start_api
 class TestClass(object):
     def test_bootaction_context(self, falcontest, seed_bootaction):
         """Test that the API will return a boot action context"""
-        url = "/api/v1.0/bootactions/nodes/%s/units" % seed_bootaction['nodename']
+        url = "/api/v1.0/bootactions/nodes/%s/units" % seed_bootaction[
+            'nodename']
         auth_hdr = {'X-Bootaction-Key': "%s" % seed_bootaction['identity_key']}
 
         result = falcontest.simulate_get(url, headers=auth_hdr)
@@ -47,7 +48,8 @@ class TestClass(object):
 
     def test_bootaction_context_noauth(self, falcontest, seed_bootaction):
         """Test that the API will return a boot action context"""
-        url = "/api/v1.0/bootactions/nodes/%s/units" % seed_bootaction['nodename']
+        url = "/api/v1.0/bootactions/nodes/%s/units" % seed_bootaction[
+            'nodename']
 
         result = falcontest.simulate_get(url)
 
@@ -55,7 +57,8 @@ class TestClass(object):
 
     def test_bootaction_context_badauth(self, falcontest, seed_bootaction):
         """Test that the API will return a boot action context"""
-        url = "/api/v1.0/bootactions/nodes/%s/units" % seed_bootaction['nodename']
+        url = "/api/v1.0/bootactions/nodes/%s/units" % seed_bootaction[
+            'nodename']
         auth_hdr = {'X-Bootaction-Key': 'deadbeef'}
 
         result = falcontest.simulate_get(url, headers=auth_hdr)

python/tests/integration/postgres/test_postgres_builddata.py (+2, -3)

@@ -90,9 +90,8 @@ class TestBuildData(object):
         }
 
         build_data_old = copy.deepcopy(build_data_latest)
-        build_data_old[
-            'collected_date'] = build_data_latest['collected_date'] - timedelta(
-                days=1)
+        build_data_old['collected_date'] = build_data_latest[
+            'collected_date'] - timedelta(days=1)
         build_data_old['task_id'] = uuid.uuid4()
 
         build_data1 = objects.BuildData(**build_data_latest)

python/tests/unit/test_k8sdriver_promenade_client.py (+55, -71)

@@ -25,16 +25,14 @@ PROM_URL = urlparse('http://promhost:80/api/v1.0')
 PROM_HOST = 'promhost'
 
 
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession._get_prom_url',
-    return_value=PROM_URL)
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession.set_auth',
-    return_value=None)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession._get_prom_url',
+            return_value=PROM_URL)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession.set_auth',
+            return_value=None)
 @responses.activate
 def test_put(patch1, patch2):
     """
@@ -47,51 +45,43 @@ def test_put(patch1, patch2):
         status=200)
 
     prom_session = PromenadeSession()
-    result = prom_session.put('v1.0/node-label/n1',
-                              body='{"key1":"label1"}',
-                              timeout=(60, 60))
+    result = prom_session.put(
+        'v1.0/node-label/n1', body='{"key1":"label1"}', timeout=(60, 60))
 
     assert PROM_HOST == prom_session.host
     assert result.status_code == 200
 
 
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession._get_prom_url',
-    return_value=PROM_URL)
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession.set_auth',
-    return_value=None)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession._get_prom_url',
+            return_value=PROM_URL)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession.set_auth',
+            return_value=None)
 @responses.activate
 def test_get(patch1, patch2):
     """
     Test get functionality
     """
     responses.add(
-        responses.GET,
-        'http://promhost:80/api/v1.0/node-label/n1',
-        status=200)
+        responses.GET, 'http://promhost:80/api/v1.0/node-label/n1', status=200)
 
     prom_session = PromenadeSession()
-    result = prom_session.get('v1.0/node-label/n1',
-                              timeout=(60, 60))
+    result = prom_session.get('v1.0/node-label/n1', timeout=(60, 60))
 
     assert result.status_code == 200
 
 
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession._get_prom_url',
-    return_value=PROM_URL)
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession.set_auth',
-    return_value=None)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession._get_prom_url',
+            return_value=PROM_URL)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession.set_auth',
+            return_value=None)
 @responses.activate
 def test_post(patch1, patch2):
     """
@@ -104,24 +94,21 @@ def test_post(patch1, patch2):
         status=200)
 
     prom_session = PromenadeSession()
-    result = prom_session.post('v1.0/node-label/n1',
-                               body='{"key1":"label1"}',
-                               timeout=(60, 60))
+    result = prom_session.post(
+        'v1.0/node-label/n1', body='{"key1":"label1"}', timeout=(60, 60))
 
     assert PROM_HOST == prom_session.host
     assert result.status_code == 200
 
 
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession._get_prom_url',
-    return_value=PROM_URL)
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession.set_auth',
-    return_value=None)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession._get_prom_url',
+            return_value=PROM_URL)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession.set_auth',
+            return_value=None)
 @responses.activate
 def test_relabel_node(patch1, patch2):
     """
@@ -141,16 +128,14 @@
     assert result == {"key1": "label1"}
 
 
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession._get_prom_url',
-    return_value=PROM_URL)
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession.set_auth',
-    return_value=None)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession._get_prom_url',
+            return_value=PROM_URL)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession.set_auth',
+            return_value=None)
 @responses.activate
 def test_relabel_node_403_status(patch1, patch2):
     """
@@ -167,16 +152,15 @@ def test_relabel_node_403_status(patch1, patch2):
     with pytest.raises(errors.ClientForbiddenError):
         prom_client.relabel_node('n1', {"key1": "label1"})
 
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession._get_prom_url',
-    return_value=PROM_URL)
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession.set_auth',
-    return_value=None)
+
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession._get_prom_url',
+            return_value=PROM_URL)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession.set_auth',
+            return_value=None)
 @responses.activate
 def test_relabel_node_401_status(patch1, patch2):
     """