Add gate

This patch set puts in a rudimentary gate.

Change-Id: I3a2466bd7be5352b46273b385d215913eb8079ba
Signed-off-by: Tin Lam <tin@irrational.io>
Tin Lam, 2 weeks ago
commit 89dfec7b4c

.zuul.yaml (+6, -3)

@@ -9,7 +9,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 - project:
-    templates:
-      - noop-jobs
+    check:
+      jobs:
+        - openstack-tox-pep8
+    gate:
+      jobs:
+        - openstack-tox-pep8
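
For reference, the project stanza in .zuul.yaml after this change reads roughly as follows (reassembled from the hunk above; the license header above it is unchanged):

- project:
    check:
      jobs:
        - openstack-tox-pep8
    gate:
      jobs:
        - openstack-tox-pep8

In Zuul terms, this drops the previous noop-jobs template and runs the openstack-tox-pep8 lint job in both the check and gate pipelines, so a change has to pass pep8 before it can merge. The remaining files in this patch set are the pep8 cleanup needed to make that job pass.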

spyglass/data_extractor/base.py (+53, -38)

@@ -281,8 +281,8 @@ class BaseDataSourcePlugin(object):
 
         # For each host list fill host profile and network IPs
         for host in hosts:
-            host_name = host['name']
-            rack_name = host['rack_name']
+            host_name = host["name"]
+            rack_name = host["rack_name"]
 
             if rack_name not in baremetal:
                 baremetal[rack_name] = {}
@@ -290,32 +290,39 @@ class BaseDataSourcePlugin(object):
             # Prepare temp dict for each host and append it to baremetal
             # at a rack level
             temp_host = {}
-            if host['host_profile'] is None:
-                temp_host['host_profile'] = "#CHANGE_ME"
+            if host["host_profile"] is None:
+                temp_host["host_profile"] = "#CHANGE_ME"
             else:
-                temp_host['host_profile'] = host['host_profile']
+                temp_host["host_profile"] = host["host_profile"]
 
             # Get Host IPs from plugin
             temp_host_ips = self.get_ips(self.region, host_name)
 
             # Fill network IP for this host
-            temp_host['ip'] = {}
-            temp_host['ip']['oob'] = temp_host_ips[host_name].get(
-                'oob', "#CHANGE_ME")
-            temp_host['ip']['calico'] = temp_host_ips[host_name].get(
-                'calico', "#CHANGE_ME")
-            temp_host['ip']['oam'] = temp_host_ips[host_name].get(
-                'oam', "#CHANGE_ME")
-            temp_host['ip']['storage'] = temp_host_ips[host_name].get(
-                'storage', "#CHANGE_ME")
-            temp_host['ip']['overlay'] = temp_host_ips[host_name].get(
-                'overlay', "#CHANGE_ME")
-            temp_host['ip']['pxe'] = temp_host_ips[host_name].get(
-                'pxe', "#CHANGE_ME")
+            temp_host["ip"] = {}
+            temp_host["ip"]["oob"] = temp_host_ips[host_name].get(
+                "oob", "#CHANGE_ME"
+            )
+            temp_host["ip"]["calico"] = temp_host_ips[host_name].get(
+                "calico", "#CHANGE_ME"
+            )
+            temp_host["ip"]["oam"] = temp_host_ips[host_name].get(
+                "oam", "#CHANGE_ME"
+            )
+            temp_host["ip"]["storage"] = temp_host_ips[host_name].get(
+                "storage", "#CHANGE_ME"
+            )
+            temp_host["ip"]["overlay"] = temp_host_ips[host_name].get(
+                "overlay", "#CHANGE_ME"
+            )
+            temp_host["ip"]["pxe"] = temp_host_ips[host_name].get(
+                "pxe", "#CHANGE_ME"
+            )
 
             baremetal[rack_name][host_name] = temp_host
-        LOG.debug("Baremetal information:\n{}".format(
-            pprint.pformat(baremetal)))
+        LOG.debug(
+            "Baremetal information:\n{}".format(pprint.pformat(baremetal))
+        )
 
         return baremetal
 
@@ -348,19 +355,20 @@ class BaseDataSourcePlugin(object):
             site_info = location_data
 
         dns_data = self.get_dns_servers(self.region)
-        site_info['dns'] = dns_data
+        site_info["dns"] = dns_data
 
         ntp_data = self.get_ntp_servers(self.region)
-        site_info['ntp'] = ntp_data
+        site_info["ntp"] = ntp_data
 
         ldap_data = self.get_ldap_information(self.region)
-        site_info['ldap'] = ldap_data
+        site_info["ldap"] = ldap_data
 
         domain_data = self.get_domain_name(self.region)
-        site_info['domain'] = domain_data
+        site_info["domain"] = domain_data
 
-        LOG.debug("Extracted site information:\n{}".format(
-            pprint.pformat(site_info)))
+        LOG.debug(
+            "Extracted site information:\n{}".format(pprint.pformat(site_info))
+        )
 
         return site_info
 
@@ -393,21 +401,28 @@ class BaseDataSourcePlugin(object):
         # networks_to_scan, so look for these networks from the data
         # returned by plugin
         networks_to_scan = [
-            'calico', 'overlay', 'pxe', 'storage', 'oam', 'oob', 'ingress'
+            "calico",
+            "overlay",
+            "pxe",
+            "storage",
+            "oam",
+            "oob",
+            "ingress",
         ]
-        network_data['vlan_network_data'] = {}
+        network_data["vlan_network_data"] = {}
 
         for net in networks:
             tmp_net = {}
-            if net['name'] in networks_to_scan:
-                tmp_net['subnet'] = net.get('subnet', '#CHANGE_ME')
-                if ((net['name'] != 'ingress') and (net['name'] != 'oob')):
-                    tmp_net['vlan'] = net.get('vlan', '#CHANGE_ME')
+            if net["name"] in networks_to_scan:
+                tmp_net["subnet"] = net.get("subnet", "#CHANGE_ME")
+                if (net["name"] != "ingress") and (net["name"] != "oob"):
+                    tmp_net["vlan"] = net.get("vlan", "#CHANGE_ME")
 
-            network_data['vlan_network_data'][net['name']] = tmp_net
+            network_data["vlan_network_data"][net["name"]] = tmp_net
 
-        LOG.debug("Extracted network data:\n{}".format(
-            pprint.pformat(network_data)))
+        LOG.debug(
+            "Extracted network data:\n{}".format(pprint.pformat(network_data))
+        )
         return network_data
 
     def extract_data(self):
@@ -418,9 +433,9 @@ class BaseDataSourcePlugin(object):
         """
         LOG.info("Extract data from plugin")
         site_data = {}
-        site_data['baremetal'] = self.extract_baremetal_information()
-        site_data['site_info'] = self.extract_site_information()
-        site_data['network'] = self.extract_network_information()
+        site_data["baremetal"] = self.extract_baremetal_information()
+        site_data["site_info"] = self.extract_site_information()
+        site_data["network"] = self.extract_network_information()
         self.site_data = site_data
         return site_data
 

spyglass/data_extractor/custom_exceptions.py (+5, -2)

@@ -31,8 +31,11 @@ class NoSpecMatched(BaseError):
         self.specs = excel_specs
 
     def display_error(self):
-        print('No spec matched. Following are the available specs:\n'.format(
-            self.specs))
+        print(
+            "No spec matched. Following are the available specs:\n".format(
+                self.specs
+            )
+        )
         sys.exit(1)
 
 

spyglass/data_extractor/plugins/formation.py (+104, -80)

@@ -22,8 +22,11 @@ import urllib3
 from spyglass.data_extractor.base import BaseDataSourcePlugin
 
 from spyglass.data_extractor.custom_exceptions import (
-    ApiClientError, ConnectionError, MissingAttributeError,
-    TokenGenerationError)
+    ApiClientError,
+    ConnectionError,
+    MissingAttributeError,
+    TokenGenerationError,
+)
 
 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 
@@ -41,8 +44,8 @@ class FormationPlugin(BaseDataSourcePlugin):
             LOG.info("Check spyglass --help for details")
             exit()
 
-        self.source_type = 'rest'
-        self.source_name = 'formation'
+        self.source_type = "rest"
+        self.source_name = "formation"
 
         # Configuration parameters
         self.formation_api_url = None
@@ -67,10 +70,10 @@ class FormationPlugin(BaseDataSourcePlugin):
         """ Sets the config params passed by CLI"""
         LOG.info("Plugin params passed:\n{}".format(pprint.pformat(conf)))
         self._validate_config_options(conf)
-        self.formation_api_url = conf['url']
-        self.user = conf['user']
-        self.password = conf['password']
-        self.token = conf.get('token', None)
+        self.formation_api_url = conf["url"]
+        self.user = conf["user"]
+        self.password = conf["password"]
+        self.token = conf.get("token", None)
 
         self._get_formation_client()
         self._update_site_and_zone(self.region)
@@ -78,21 +81,24 @@ class FormationPlugin(BaseDataSourcePlugin):
     def get_plugin_conf(self, kwargs):
         """ Validates the plugin param and return if success"""
         try:
-            assert (kwargs['formation_url']
-                    ) is not None, "formation_url is Not Specified"
-            url = kwargs['formation_url']
-            assert (kwargs['formation_user']
-                    ) is not None, "formation_user is Not Specified"
-            user = kwargs['formation_user']
-            assert (kwargs['formation_password']
-                    ) is not None, "formation_password is Not Specified"
-            password = kwargs['formation_password']
+            assert (
+                kwargs["formation_url"]
+            ) is not None, "formation_url is Not Specified"
+            url = kwargs["formation_url"]
+            assert (
+                kwargs["formation_user"]
+            ) is not None, "formation_user is Not Specified"
+            user = kwargs["formation_user"]
+            assert (
+                kwargs["formation_password"]
+            ) is not None, "formation_password is Not Specified"
+            password = kwargs["formation_password"]
         except AssertionError:
             LOG.error("Insufficient plugin parameter! Spyglass exited!")
             raise
             exit()
 
-        plugin_conf = {'url': url, 'user': user, 'password': password}
+        plugin_conf = {"url": url, "user": user, "password": password}
         return plugin_conf
 
     def _validate_config_options(self, conf):
@@ -129,21 +135,24 @@ class FormationPlugin(BaseDataSourcePlugin):
         if self.token:
             return self.token
 
-        url = self.formation_api_url + '/zones'
+        url = self.formation_api_url + "/zones"
         try:
             token_response = requests.get(
                 url,
                 auth=(self.user, self.password),
-                verify=self.client_config.verify_ssl)
+                verify=self.client_config.verify_ssl,
+            )
         except requests.exceptions.ConnectionError:
-            raise ConnectionError('Incorrect URL: {}'.format(url))
+            raise ConnectionError("Incorrect URL: {}".format(url))
 
         if token_response.status_code == 200:
-            self.token = token_response.json().get('X-Subject-Token', None)
+            self.token = token_response.json().get("X-Subject-Token", None)
         else:
             raise TokenGenerationError(
-                'Unable to generate token because {}'.format(
-                    token_response.reason))
+                "Unable to generate token because {}".format(
+                    token_response.reason
+                )
+            )
 
         return self.token
 
@@ -155,9 +164,10 @@ class FormationPlugin(BaseDataSourcePlugin):
         Generate the token and add it formation config object.
         """
         token = self._generate_token()
-        self.client_config.api_key = {'X-Auth-Token': self.user + '|' + token}
+        self.client_config.api_key = {"X-Auth-Token": self.user + "|" + token}
         self.formation_api_client = formation_client.ApiClient(
-            self.client_config)
+            self.client_config
+        )
 
     def _update_site_and_zone(self, region):
         """Get Zone name and Site name from region"""
@@ -169,8 +179,8 @@ class FormationPlugin(BaseDataSourcePlugin):
         # site = zone[:-1]
 
         self.region_zone_map[region] = {}
-        self.region_zone_map[region]['zone'] = zone
-        self.region_zone_map[region]['site'] = site
+        self.region_zone_map[region]["zone"] = zone
+        self.region_zone_map[region]["site"] = site
 
     def _get_zone_by_region_name(self, region_name):
         zone_api = formation_client.ZonesApi(self.formation_api_client)
@@ -248,7 +258,7 @@ class FormationPlugin(BaseDataSourcePlugin):
 
         return self.device_name_id_mapping.get(device_name, None)
 
-    def _get_racks(self, zone, rack_type='compute'):
+    def _get_racks(self, zone, rack_type="compute"):
         zone_id = self._get_zone_id_by_name(zone)
         rack_api = formation_client.RacksApi(self.formation_api_client)
         racks = rack_api.zones_zone_id_racks_get(zone_id)
@@ -296,35 +306,40 @@ class FormationPlugin(BaseDataSourcePlugin):
     # Implement Abstract functions
 
     def get_racks(self, region):
-        zone = self.region_zone_map[region]['zone']
-        return self._get_racks(zone, rack_type='compute')
+        zone = self.region_zone_map[region]["zone"]
+        return self._get_racks(zone, rack_type="compute")
 
     def get_hosts(self, region, rack=None):
-        zone = self.region_zone_map[region]['zone']
+        zone = self.region_zone_map[region]["zone"]
         zone_id = self._get_zone_id_by_name(zone)
         device_api = formation_client.DevicesApi(self.formation_api_client)
         control_hosts = device_api.zones_zone_id_control_nodes_get(zone_id)
         compute_hosts = device_api.zones_zone_id_devices_get(
-            zone_id, type='KVM')
+            zone_id, type="KVM"
+        )
 
         hosts_list = []
         for host in control_hosts:
             self.device_name_id_mapping[host.aic_standard_name] = host.id
-            hosts_list.append({
-                'name': host.aic_standard_name,
-                'type': 'controller',
-                'rack_name': host.rack_name,
-                'host_profile': host.host_profile_name
-            })
+            hosts_list.append(
+                {
+                    "name": host.aic_standard_name,
+                    "type": "controller",
+                    "rack_name": host.rack_name,
+                    "host_profile": host.host_profile_name,
+                }
+            )
 
         for host in compute_hosts:
             self.device_name_id_mapping[host.aic_standard_name] = host.id
-            hosts_list.append({
-                'name': host.aic_standard_name,
-                'type': 'compute',
-                'rack_name': host.rack_name,
-                'host_profile': host.host_profile_name
-            })
+            hosts_list.append(
+                {
+                    "name": host.aic_standard_name,
+                    "type": "compute",
+                    "rack_name": host.rack_name,
+                    "host_profile": host.host_profile_name,
+                }
+            )
         """
         for host in itertools.chain(control_hosts, compute_hosts):
             self.device_name_id_mapping[host.aic_standard_name] = host.id
@@ -339,40 +354,43 @@ class FormationPlugin(BaseDataSourcePlugin):
         return hosts_list
 
     def get_networks(self, region):
-        zone = self.region_zone_map[region]['zone']
+        zone = self.region_zone_map[region]["zone"]
         zone_id = self._get_zone_id_by_name(zone)
         region_id = self._get_region_id_by_name(region)
         vlan_api = formation_client.VlansApi(self.formation_api_client)
         vlans = vlan_api.zones_zone_id_regions_region_id_vlans_get(
-            zone_id, region_id)
+            zone_id, region_id
+        )
 
         # Case when vlans list is empty from
         # zones_zone_id_regions_region_id_vlans_get
-        if len(vlans) is 0:
+        if len(vlans) == 0:
             # get device-id from the first host and get the network details
             hosts = self.get_hosts(self.region)
-            host = hosts[0]['name']
+            host = hosts[0]["name"]
             device_id = self._get_device_id_by_name(host)
             vlans = vlan_api.zones_zone_id_devices_device_id_vlans_get(
-                zone_id, device_id)
+                zone_id, device_id
+            )
 
         LOG.debug("Extracted region network information\n{}".format(vlans))
         vlans_list = []
         for vlan_ in vlans:
-            if len(vlan_.vlan.ipv4) is not 0:
+            if len(vlan_.vlan.ipv4) != 0:
                 tmp_vlan = {}
-                tmp_vlan['name'] = self._get_network_name_from_vlan_name(
-                    vlan_.vlan.name)
-                tmp_vlan['vlan'] = vlan_.vlan.vlan_id
-                tmp_vlan['subnet'] = vlan_.vlan.subnet_range
-                tmp_vlan['gateway'] = vlan_.ipv4_gateway
-                tmp_vlan['subnet_level'] = vlan_.vlan.subnet_level
+                tmp_vlan["name"] = self._get_network_name_from_vlan_name(
+                    vlan_.vlan.name
+                )
+                tmp_vlan["vlan"] = vlan_.vlan.vlan_id
+                tmp_vlan["subnet"] = vlan_.vlan.subnet_range
+                tmp_vlan["gateway"] = vlan_.ipv4_gateway
+                tmp_vlan["subnet_level"] = vlan_.vlan.subnet_level
                 vlans_list.append(tmp_vlan)
 
         return vlans_list
 
     def get_ips(self, region, host=None):
-        zone = self.region_zone_map[region]['zone']
+        zone = self.region_zone_map[region]["zone"]
         zone_id = self._get_zone_id_by_name(zone)
 
         if host:
@@ -381,7 +399,7 @@ class FormationPlugin(BaseDataSourcePlugin):
             hosts = []
             hosts_dict = self.get_hosts(zone)
             for host in hosts_dict:
-                hosts.append(host['name'])
+                hosts.append(host["name"])
 
         vlan_api = formation_client.VlansApi(self.formation_api_client)
         ip_ = {}
@@ -389,18 +407,23 @@ class FormationPlugin(BaseDataSourcePlugin):
         for host in hosts:
             device_id = self._get_device_id_by_name(host)
             vlans = vlan_api.zones_zone_id_devices_device_id_vlans_get(
-                zone_id, device_id)
+                zone_id, device_id
+            )
             LOG.debug("Received VLAN Network Information\n{}".format(vlans))
             ip_[host] = {}
             for vlan_ in vlans:
                 # TODO(pg710r) We need to handle the case when incoming ipv4
                 # list is empty
-                if len(vlan_.vlan.ipv4) is not 0:
+                if len(vlan_.vlan.ipv4) != 0:
                     name = self._get_network_name_from_vlan_name(
-                        vlan_.vlan.name)
+                        vlan_.vlan.name
+                    )
                     ipv4 = vlan_.vlan.ipv4[0].ip
-                    LOG.debug("vlan:{},name:{},ip:{},vlan_name:{}".format(
-                        vlan_.vlan.vlan_id, name, ipv4, vlan_.vlan.name))
+                    LOG.debug(
+                        "vlan:{},name:{},ip:{},vlan_name:{}".format(
+                            vlan_.vlan.vlan_id, name, ipv4, vlan_.vlan.name
+                        )
+                    )
                     # TODD(pg710r) This code needs to extended to support ipv4
                     # and ipv6
                     # ip_[host][name] = {'ipv4': ipv4}
@@ -419,12 +442,12 @@ class FormationPlugin(BaseDataSourcePlugin):
            vlan_name contains "ILO" the network name is "oob"
         """
         network_names = {
-            'ksn': 'calico',
-            'storage': 'storage',
-            'server': 'oam',
-            'ovs': 'overlay',
-            'ILO': 'oob',
-            'pxe': 'pxe'
+            "ksn": "calico",
+            "storage": "storage",
+            "server": "oam",
+            "ovs": "overlay",
+            "ILO": "oob",
+            "pxe": "pxe",
         }
 
         for name in network_names:
@@ -438,7 +461,7 @@ class FormationPlugin(BaseDataSourcePlugin):
 
     def get_dns_servers(self, region):
         try:
-            zone = self.region_zone_map[region]['zone']
+            zone = self.region_zone_map[region]["zone"]
             zone_id = self._get_zone_id_by_name(zone)
             zone_api = formation_client.ZonesApi(self.formation_api_client)
             zone_ = zone_api.zones_zone_id_get(zone_id)
@@ -463,7 +486,7 @@ class FormationPlugin(BaseDataSourcePlugin):
 
     def get_location_information(self, region):
         """ get location information for a zone and return """
-        site = self.region_zone_map[region]['site']
+        site = self.region_zone_map[region]["site"]
         site_id = self._get_site_id_by_name(site)
         site_api = formation_client.SitesApi(self.formation_api_client)
         site_info = site_api.sites_site_id_get(site_id)
@@ -471,18 +494,19 @@ class FormationPlugin(BaseDataSourcePlugin):
         try:
             return {
                 # 'corridor': site_info.corridor,
-                'name': site_info.city,
-                'state': site_info.state,
-                'country': site_info.country,
-                'physical_location_id': site_info.clli,
+                "name": site_info.city,
+                "state": site_info.state,
+                "country": site_info.country,
+                "physical_location_id": site_info.clli,
             }
         except AttributeError as e:
-            raise MissingAttributeError('Missing {} information in {}'.format(
-                e, site_info.city))
+            raise MissingAttributeError(
+                "Missing {} information in {}".format(e, site_info.city)
+            )
 
     def get_domain_name(self, region):
         try:
-            zone = self.region_zone_map[region]['zone']
+            zone = self.region_zone_map[region]["zone"]
             zone_id = self._get_zone_id_by_name(zone)
             zone_api = formation_client.ZonesApi(self.formation_api_client)
             zone_ = zone_api.zones_zone_id_get(zone_id)
@@ -490,7 +514,7 @@ class FormationPlugin(BaseDataSourcePlugin):
             raise ApiClientError(e.msg)
 
         if not zone_.dns:
-            LOG.warn('Got None while running get domain name')
+            LOG.warn("Got None while running get domain name")
             return None
 
         return zone_.dns

spyglass/data_extractor/plugins/tugboat/check_exceptions.py (+6, -3)

@@ -23,7 +23,7 @@ class NotEnoughIp(BaseError):
         self.total_nodes = total_nodes
 
     def display_error(self):
-        print('{} can not handle {} nodes'.format(self.cidr, self.total_nodes))
+        print("{} can not handle {} nodes".format(self.cidr, self.total_nodes))
 
 
 class NoSpecMatched(BaseError):
@@ -31,5 +31,8 @@ class NoSpecMatched(BaseError):
         self.specs = excel_specs
 
     def display_error(self):
-        print('No spec matched. Following are the available specs:\n'.format(
-            self.specs))
+        print(
+            "No spec matched. Following are the available specs:\n".format(
+                self.specs
+            )
+        )

spyglass/data_extractor/plugins/tugboat/excel_parser.py (+185, -153)

@@ -20,17 +20,18 @@ import yaml
 from openpyxl import load_workbook
 from openpyxl import Workbook
 from spyglass.data_extractor.custom_exceptions import NoSpecMatched
+
 # from spyglass.data_extractor.custom_exceptions
 
 LOG = logging.getLogger(__name__)
 
 
-class ExcelParser():
+class ExcelParser:
     """ Parse data from excel into a dict """
 
     def __init__(self, file_name, excel_specs):
         self.file_name = file_name
-        with open(excel_specs, 'r') as f:
+        with open(excel_specs, "r") as f:
             spec_raw_data = f.read()
         self.excel_specs = yaml.safe_load(spec_raw_data)
         # A combined design spec, returns a workbok object after combining
@@ -38,12 +39,12 @@ class ExcelParser():
         combined_design_spec = self.combine_excel_design_specs(file_name)
         self.wb_combined = combined_design_spec
         self.filenames = file_name
-        self.spec = 'xl_spec'
+        self.spec = "xl_spec"
 
     @staticmethod
     def sanitize(string):
         """ Remove extra spaces and convert string to lower case """
-        return string.replace(' ', '').lower()
+        return string.replace(" ", "").lower()
 
     def compare(self, string1, string2):
         """ Compare the strings """
@@ -52,19 +53,19 @@ class ExcelParser():
     def validate_sheet(self, spec, sheet):
         """ Check if the sheet is correct or not """
         ws = self.wb_combined[sheet]
-        header_row = self.excel_specs['specs'][spec]['header_row']
-        ipmi_header = self.excel_specs['specs'][spec]['ipmi_address_header']
-        ipmi_column = self.excel_specs['specs'][spec]['ipmi_address_col']
+        header_row = self.excel_specs["specs"][spec]["header_row"]
+        ipmi_header = self.excel_specs["specs"][spec]["ipmi_address_header"]
+        ipmi_column = self.excel_specs["specs"][spec]["ipmi_address_col"]
         header_value = ws.cell(row=header_row, column=ipmi_column).value
         return bool(self.compare(ipmi_header, header_value))
 
     def find_correct_spec(self):
         """ Find the correct spec """
-        for spec in self.excel_specs['specs']:
-            sheet_name = self.excel_specs['specs'][spec]['ipmi_sheet_name']
+        for spec in self.excel_specs["specs"]:
+            sheet_name = self.excel_specs["specs"][spec]["ipmi_sheet_name"]
             for sheet in self.wb_combined.sheetnames:
                 if self.compare(sheet_name, sheet):
-                    self.excel_specs['specs'][spec]['ipmi_sheet_name'] = sheet
+                    self.excel_specs["specs"][spec]["ipmi_sheet_name"] = sheet
                     if self.validate_sheet(spec, sheet):
                         return spec
         raise NoSpecMatched(self.excel_specs)
@@ -73,31 +74,37 @@ class ExcelParser():
         """ Read IPMI data from the sheet """
         ipmi_data = {}
         hosts = []
-        provided_sheetname = self.excel_specs['specs'][self.
-                                                       spec]['ipmi_sheet_name']
+        provided_sheetname = self.excel_specs["specs"][self.spec][
+            "ipmi_sheet_name"
+        ]
         workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
-            provided_sheetname)
+            provided_sheetname
+        )
         if workbook_object is not None:
             ws = workbook_object[extracted_sheetname]
         else:
             ws = self.wb_combined[provided_sheetname]
-        row = self.excel_specs['specs'][self.spec]['start_row']
-        end_row = self.excel_specs['specs'][self.spec]['end_row']
-        hostname_col = self.excel_specs['specs'][self.spec]['hostname_col']
-        ipmi_address_col = self.excel_specs['specs'][self.
-                                                     spec]['ipmi_address_col']
-        host_profile_col = self.excel_specs['specs'][self.
-                                                     spec]['host_profile_col']
-        ipmi_gateway_col = self.excel_specs['specs'][self.
-                                                     spec]['ipmi_gateway_col']
+        row = self.excel_specs["specs"][self.spec]["start_row"]
+        end_row = self.excel_specs["specs"][self.spec]["end_row"]
+        hostname_col = self.excel_specs["specs"][self.spec]["hostname_col"]
+        ipmi_address_col = self.excel_specs["specs"][self.spec][
+            "ipmi_address_col"
+        ]
+        host_profile_col = self.excel_specs["specs"][self.spec][
+            "host_profile_col"
+        ]
+        ipmi_gateway_col = self.excel_specs["specs"][self.spec][
+            "ipmi_gateway_col"
+        ]
         previous_server_gateway = None
         while row <= end_row:
             hostname = self.sanitize(
-                ws.cell(row=row, column=hostname_col).value)
+                ws.cell(row=row, column=hostname_col).value
+            )
             hosts.append(hostname)
             ipmi_address = ws.cell(row=row, column=ipmi_address_col).value
-            if '/' in ipmi_address:
-                ipmi_address = ipmi_address.split('/')[0]
+            if "/" in ipmi_address:
+                ipmi_address = ipmi_address.split("/")[0]
             ipmi_gateway = ws.cell(row=row, column=ipmi_gateway_col).value
             if ipmi_gateway:
                 previous_server_gateway = ipmi_gateway
@@ -106,32 +113,39 @@ class ExcelParser():
             host_profile = ws.cell(row=row, column=host_profile_col).value
             try:
                 if host_profile is None:
-                    raise RuntimeError("No value read from {} ".format(
-                        self.file_name) + "sheet:{} row:{}, col:{}".format(
-                            self.spec, row, host_profile_col))
+                    raise RuntimeError(
+                        "No value read from {} ".format(self.file_name)
+                        + "sheet:{} row:{}, col:{}".format(
+                            self.spec, row, host_profile_col
+                        )
+                    )
             except RuntimeError as rerror:
                 LOG.critical(rerror)
                 sys.exit("Tugboat exited!!")
             ipmi_data[hostname] = {
-                'ipmi_address': ipmi_address,
-                'ipmi_gateway': ipmi_gateway,
-                'host_profile': host_profile,
-                'type': type,
+                "ipmi_address": ipmi_address,
+                "ipmi_gateway": ipmi_gateway,
+                "host_profile": host_profile,
+                "type": type,
             }
             row += 1
-        LOG.debug("ipmi data extracted from excel:\n{}".format(
-            pprint.pformat(ipmi_data)))
-        LOG.debug("host data extracted from excel:\n{}".format(
-            pprint.pformat(hosts)))
+        LOG.debug(
+            "ipmi data extracted from excel:\n{}".format(
+                pprint.pformat(ipmi_data)
+            )
+        )
+        LOG.debug(
+            "host data extracted from excel:\n{}".format(pprint.pformat(hosts))
+        )
         return [ipmi_data, hosts]
 
     def get_private_vlan_data(self, ws):
         """ Get private vlan data from private IP sheet """
         vlan_data = {}
-        row = self.excel_specs['specs'][self.spec]['vlan_start_row']
-        end_row = self.excel_specs['specs'][self.spec]['vlan_end_row']
-        type_col = self.excel_specs['specs'][self.spec]['net_type_col']
-        vlan_col = self.excel_specs['specs'][self.spec]['vlan_col']
+        row = self.excel_specs["specs"][self.spec]["vlan_start_row"]
+        end_row = self.excel_specs["specs"][self.spec]["vlan_end_row"]
+        type_col = self.excel_specs["specs"][self.spec]["net_type_col"]
+        vlan_col = self.excel_specs["specs"][self.spec]["vlan_col"]
         while row <= end_row:
             cell_value = ws.cell(row=row, column=type_col).value
             if cell_value:
@@ -140,27 +154,30 @@ class ExcelParser():
                     vlan = vlan.lower()
                 vlan_data[vlan] = cell_value
             row += 1
-        LOG.debug("vlan data extracted from excel:\n%s",
-                  pprint.pformat(vlan_data))
+        LOG.debug(
+            "vlan data extracted from excel:\n%s", pprint.pformat(vlan_data)
+        )
         return vlan_data
 
     def get_private_network_data(self):
         """ Read network data from the private ip sheet """
-        provided_sheetname = self.excel_specs['specs'][
-            self.spec]['private_ip_sheet']
+        provided_sheetname = self.excel_specs["specs"][self.spec][
+            "private_ip_sheet"
+        ]
         workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
-            provided_sheetname)
+            provided_sheetname
+        )
         if workbook_object is not None:
             ws = workbook_object[extracted_sheetname]
         else:
             ws = self.wb_combined[provided_sheetname]
         vlan_data = self.get_private_vlan_data(ws)
         network_data = {}
-        row = self.excel_specs['specs'][self.spec]['net_start_row']
-        end_row = self.excel_specs['specs'][self.spec]['net_end_row']
-        col = self.excel_specs['specs'][self.spec]['net_col']
-        vlan_col = self.excel_specs['specs'][self.spec]['net_vlan_col']
-        old_vlan = ''
+        row = self.excel_specs["specs"][self.spec]["net_start_row"]
+        end_row = self.excel_specs["specs"][self.spec]["net_end_row"]
+        col = self.excel_specs["specs"][self.spec]["net_col"]
+        vlan_col = self.excel_specs["specs"][self.spec]["net_vlan_col"]
+        old_vlan = ""
         while row <= end_row:
             vlan = ws.cell(row=row, column=vlan_col).value
             if vlan:
@@ -168,11 +185,8 @@ class ExcelParser():
             network = ws.cell(row=row, column=col).value
             if vlan and network:
                 net_type = vlan_data[vlan]
-                if 'vlan' not in network_data:
-                    network_data[net_type] = {
-                        'vlan': vlan,
-                        'subnet': [],
-                    }
+                if "vlan" not in network_data:
+                    network_data[net_type] = {"vlan": vlan, "subnet": []}
             elif not vlan and network:
                 # If vlan is not present then assign old vlan to vlan as vlan
                 # value is spread over several rows
@@ -180,11 +194,11 @@ class ExcelParser():
             else:
                 row += 1
                 continue
-            network_data[vlan_data[vlan]]['subnet'].append(network)
+            network_data[vlan_data[vlan]]["subnet"].append(network)
             old_vlan = vlan
             row += 1
         for network in network_data:
-            network_data[network]['is_common'] = True
+            network_data[network]["is_common"] = True
             """
             if len(network_data[network]['subnet']) > 1:
                 network_data[network]['is_common'] = False
@@ -199,153 +213,167 @@ class ExcelParser():
     def get_public_network_data(self):
         """ Read public network data from public ip data """
         network_data = {}
-        provided_sheetname = self.excel_specs['specs'][self.
-                                                       spec]['public_ip_sheet']
+        provided_sheetname = self.excel_specs["specs"][self.spec][
+            "public_ip_sheet"
+        ]
         workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
-            provided_sheetname)
+            provided_sheetname
+        )
         if workbook_object is not None:
             ws = workbook_object[extracted_sheetname]
         else:
             ws = self.wb_combined[provided_sheetname]
-        oam_row = self.excel_specs['specs'][self.spec]['oam_ip_row']
-        oam_col = self.excel_specs['specs'][self.spec]['oam_ip_col']
-        oam_vlan_col = self.excel_specs['specs'][self.spec]['oam_vlan_col']
-        ingress_row = self.excel_specs['specs'][self.spec]['ingress_ip_row']
-        oob_row = self.excel_specs['specs'][self.spec]['oob_net_row']
-        col = self.excel_specs['specs'][self.spec]['oob_net_start_col']
-        end_col = self.excel_specs['specs'][self.spec]['oob_net_end_col']
+        oam_row = self.excel_specs["specs"][self.spec]["oam_ip_row"]
+        oam_col = self.excel_specs["specs"][self.spec]["oam_ip_col"]
+        oam_vlan_col = self.excel_specs["specs"][self.spec]["oam_vlan_col"]
+        ingress_row = self.excel_specs["specs"][self.spec]["ingress_ip_row"]
+        oob_row = self.excel_specs["specs"][self.spec]["oob_net_row"]
+        col = self.excel_specs["specs"][self.spec]["oob_net_start_col"]
+        end_col = self.excel_specs["specs"][self.spec]["oob_net_end_col"]
         network_data = {
-            'oam': {
-                'subnet': [ws.cell(row=oam_row, column=oam_col).value],
-                'vlan': ws.cell(row=oam_row, column=oam_vlan_col).value,
+            "oam": {
+                "subnet": [ws.cell(row=oam_row, column=oam_col).value],
+                "vlan": ws.cell(row=oam_row, column=oam_vlan_col).value,
             },
-            'ingress': ws.cell(row=ingress_row, column=oam_col).value,
-        }
-        network_data['oob'] = {
-            'subnet': [],
+            "ingress": ws.cell(row=ingress_row, column=oam_col).value,
         }
+        network_data["oob"] = {"subnet": []}
         while col <= end_col:
             cell_value = ws.cell(row=oob_row, column=col).value
             if cell_value:
-                network_data['oob']['subnet'].append(self.sanitize(cell_value))
+                network_data["oob"]["subnet"].append(self.sanitize(cell_value))
             col += 1
         LOG.debug(
             "public network data extracted from\
-                          excel:\n%s", pprint.pformat(network_data))
+                          excel:\n%s",
+            pprint.pformat(network_data),
+        )
         return network_data
 
     def get_site_info(self):
         """ Read location, dns, ntp and ldap data"""
         site_info = {}
-        provided_sheetname = self.excel_specs['specs'][
-            self.spec]['dns_ntp_ldap_sheet']
+        provided_sheetname = self.excel_specs["specs"][self.spec][
+            "dns_ntp_ldap_sheet"
+        ]
         workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
-            provided_sheetname)
+            provided_sheetname
+        )
        if workbook_object is not None:
             ws = workbook_object[extracted_sheetname]
         else:
             ws = self.wb_combined[provided_sheetname]
-        dns_row = self.excel_specs['specs'][self.spec]['dns_row']
-        dns_col = self.excel_specs['specs'][self.spec]['dns_col']
-        ntp_row = self.excel_specs['specs'][self.spec]['ntp_row']
-        ntp_col = self.excel_specs['specs'][self.spec]['ntp_col']
-        domain_row = self.excel_specs['specs'][self.spec]['domain_row']
-        domain_col = self.excel_specs['specs'][self.spec]['domain_col']
-        login_domain_row = self.excel_specs['specs'][self.
-                                                     spec]['login_domain_row']
-        ldap_col = self.excel_specs['specs'][self.spec]['ldap_col']
-        global_group = self.excel_specs['specs'][self.spec]['global_group']
-        ldap_search_url_row = self.excel_specs['specs'][
-            self.spec]['ldap_search_url_row']
+        dns_row = self.excel_specs["specs"][self.spec]["dns_row"]
+        dns_col = self.excel_specs["specs"][self.spec]["dns_col"]
+        ntp_row = self.excel_specs["specs"][self.spec]["ntp_row"]
+        ntp_col = self.excel_specs["specs"][self.spec]["ntp_col"]
+        domain_row = self.excel_specs["specs"][self.spec]["domain_row"]
+        domain_col = self.excel_specs["specs"][self.spec]["domain_col"]
+        login_domain_row = self.excel_specs["specs"][self.spec][
+            "login_domain_row"
+        ]
+        ldap_col = self.excel_specs["specs"][self.spec]["ldap_col"]
+        global_group = self.excel_specs["specs"][self.spec]["global_group"]
+        ldap_search_url_row = self.excel_specs["specs"][self.spec][
+            "ldap_search_url_row"
+        ]
         dns_servers = ws.cell(row=dns_row, column=dns_col).value
         ntp_servers = ws.cell(row=ntp_row, column=ntp_col).value
         try:
             if dns_servers is None:
                 raise RuntimeError(
-                    "No value for dns_server from:{} Sheet:'{}' Row:{} Col:{}".
-                    format(self.file_name, provided_sheetname, dns_row,
-                           dns_col))
-                raise RuntimeError(
-                    "No value for ntp_server frome:{} Sheet:'{}' Row:{} Col:{}"
-                    .format(self.file_name, provided_sheetname, ntp_row,
-                            ntp_col))
+                    (
+                        "No value for dns_server from:{} Sheet:'{}' ",
+                        "Row:{} Col:{}",
+                    ).format(
+                        self.file_name, provided_sheetname, dns_row, dns_col
+                    )
+                )
         except RuntimeError as rerror:
             LOG.critical(rerror)
             sys.exit("Tugboat exited!!")
 
-        dns_servers = dns_servers.replace('\n', ' ')
-        ntp_servers = ntp_servers.replace('\n', ' ')
-        if ',' in dns_servers:
-            dns_servers = dns_servers.split(',')
+        dns_servers = dns_servers.replace("\n", " ")
+        ntp_servers = ntp_servers.replace("\n", " ")
+        if "," in dns_servers:
+            dns_servers = dns_servers.split(",")
         else:
             dns_servers = dns_servers.split()
-        if ',' in ntp_servers:
-            ntp_servers = ntp_servers.split(',')
+        if "," in ntp_servers:
+            ntp_servers = ntp_servers.split(",")
         else:
             ntp_servers = ntp_servers.split()
         site_info = {
-            'location': self.get_location_data(),
-            'dns': dns_servers,
-            'ntp': ntp_servers,
-            'domain': ws.cell(row=domain_row, column=domain_col).value,
-            'ldap': {
-                'subdomain': ws.cell(row=login_domain_row,
-                                     column=ldap_col).value,
-                'common_name': ws.cell(row=global_group,
-                                       column=ldap_col).value,
-                'url': ws.cell(row=ldap_search_url_row, column=ldap_col).value,
-            }
+            "location": self.get_location_data(),
+            "dns": dns_servers,
+            "ntp": ntp_servers,
+            "domain": ws.cell(row=domain_row, column=domain_col).value,
+            "ldap": {
+                "subdomain": ws.cell(
+                    row=login_domain_row, column=ldap_col
+                ).value,
+                "common_name": ws.cell(
+                    row=global_group, column=ldap_col
+                ).value,
+                "url": ws.cell(row=ldap_search_url_row, column=ldap_col).value,
+            },
         }
         LOG.debug(
             "Site Info extracted from\
-                          excel:\n%s", pprint.pformat(site_info))
+                          excel:\n%s",
+            pprint.pformat(site_info),
+        )
         return site_info
 
     def get_location_data(self):
         """ Read location data from the site and zone sheet """
-        provided_sheetname = self.excel_specs['specs'][self.
-                                                       spec]['location_sheet']
+        provided_sheetname = self.excel_specs["specs"][self.spec][
+            "location_sheet"
+        ]
         workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
-            provided_sheetname)
+            provided_sheetname
+        )
         if workbook_object is not None:
             ws = workbook_object[extracted_sheetname]
         else:
             ws = self.wb_combined[provided_sheetname]
-        corridor_row = self.excel_specs['specs'][self.spec]['corridor_row']
-        column = self.excel_specs['specs'][self.spec]['column']
-        site_name_row = self.excel_specs['specs'][self.spec]['site_name_row']
-        state_name_row = self.excel_specs['specs'][self.spec]['state_name_row']
-        country_name_row = self.excel_specs['specs'][self.
-                                                     spec]['country_name_row']
-        clli_name_row = self.excel_specs['specs'][self.spec]['clli_name_row']
+        corridor_row = self.excel_specs["specs"][self.spec]["corridor_row"]
+        column = self.excel_specs["specs"][self.spec]["column"]
+        site_name_row = self.excel_specs["specs"][self.spec]["site_name_row"]
+        state_name_row = self.excel_specs["specs"][self.spec]["state_name_row"]
+        country_name_row = self.excel_specs["specs"][self.spec][
+            "country_name_row"
+        ]
+        clli_name_row = self.excel_specs["specs"][self.spec]["clli_name_row"]
         return {
-            'corridor': ws.cell(row=corridor_row, column=column).value,
-            'name': ws.cell(row=site_name_row, column=column).value,
-            'state': ws.cell(row=state_name_row, column=column).value,
-            'country': ws.cell(row=country_name_row, column=column).value,
-            'physical_location': ws.cell(row=clli_name_row,
-                                         column=column).value,
+            "corridor": ws.cell(row=corridor_row, column=column).value,
+            "name": ws.cell(row=site_name_row, column=column).value,
+            "state": ws.cell(row=state_name_row, column=column).value,
+            "country": ws.cell(row=country_name_row, column=column).value,
+            "physical_location": ws.cell(
+                row=clli_name_row, column=column
+            ).value,
         }
 
     def validate_sheet_names_with_spec(self):
         """ Checks is sheet name in spec file matches with excel file"""
-        spec = list(self.excel_specs['specs'].keys())[0]
-        spec_item = self.excel_specs['specs'][spec]
+        spec = list(self.excel_specs["specs"].keys())[0]
+        spec_item = self.excel_specs["specs"][spec]
         sheet_name_list = []
-        ipmi_header_sheet_name = spec_item['ipmi_sheet_name']
+        ipmi_header_sheet_name = spec_item["ipmi_sheet_name"]
         sheet_name_list.append(ipmi_header_sheet_name)
-        private_ip_sheet_name = spec_item['private_ip_sheet']
+        private_ip_sheet_name = spec_item["private_ip_sheet"]
         sheet_name_list.append(private_ip_sheet_name)
-        public_ip_sheet_name = spec_item['public_ip_sheet']
+        public_ip_sheet_name = spec_item["public_ip_sheet"]
         sheet_name_list.append(public_ip_sheet_name)
-        dns_ntp_ldap_sheet_name = spec_item['dns_ntp_ldap_sheet']
+        dns_ntp_ldap_sheet_name = spec_item["dns_ntp_ldap_sheet"]
         sheet_name_list.append(dns_ntp_ldap_sheet_name)
-        location_sheet_name = spec_item['location_sheet']
+        location_sheet_name = spec_item["location_sheet"]
         sheet_name_list.append(location_sheet_name)
         try:
             for sheetname in sheet_name_list:
-                workbook_object, extracted_sheetname = \
-                    self.get_xl_obj_and_sheetname(sheetname)
+                workbook_object, extracted_sheetname = (
+                    self.get_xl_obj_and_sheetname(sheetname))
                 if workbook_object is not None:
                     wb = workbook_object
                     sheetname = extracted_sheetname
@@ -354,7 +382,8 @@ class ExcelParser():
 
                 if sheetname not in wb.sheetnames:
                     raise RuntimeError(
-                        "SheetName '{}' not found ".format(sheetname))
+                        "SheetName '{}' not found ".format(sheetname)
+                    )
         except RuntimeError as rerror:
             LOG.critical(rerror)
             sys.exit("Tugboat exited!!")
@@ -369,16 +398,18 @@ class ExcelParser():
         public_network_data = self.get_public_network_data()
         site_info_data = self.get_site_info()
         data = {
-            'ipmi_data': ipmi_data,
-            'network_data': {
-                'private': network_data,
-                'public': public_network_data,
+            "ipmi_data": ipmi_data,
+            "network_data": {
+                "private": network_data,
+                "public": public_network_data,
             },
-            'site_info': site_info_data,
+            "site_info": site_info_data,
        }
         LOG.debug(
             "Location data extracted from\
-                          excel:\n%s", pprint.pformat(data))
+                          excel:\n%s",
+            pprint.pformat(data),
+        )
         return data
 
     def combine_excel_design_specs(self, filenames):
@@ -391,8 +422,9 @@ class ExcelParser():
                 loaded_workbook_ws = loaded_workbook[names]
                 for row in loaded_workbook_ws:
                     for cell in row:
-                        design_spec_worksheet[cell.
-                                              coordinate].value = cell.value
+                        design_spec_worksheet[
+                            cell.coordinate
+                        ].value = cell.value
         return design_spec
 
     def get_xl_obj_and_sheetname(self, sheetname):
@@ -400,10 +432,10 @@ class ExcelParser():
         The logic confirms if the sheetname is specified for example as:
             "MTN57a_AEC_Network_Design_v1.6.xlsx:Public IPs"
         """
-        if (re.search('.xlsx', sheetname) or re.search('.xls', sheetname)):
+        if re.search(".xlsx", sheetname) or re.search(".xls", sheetname):
             """ Extract file name """
-            source_xl_file = sheetname.split(':')[0]
+            source_xl_file = sheetname.split(":")[0]
             wb = load_workbook(source_xl_file, data_only=True)
-            return [wb, sheetname.split(':')[1]]
+            return [wb, sheetname.split(":")[1]]
         else:
             return [None, sheetname]

spyglass/data_extractor/plugins/tugboat/tugboat.py (+104, -90)

@@ -25,8 +25,8 @@ LOG = logging.getLogger(__name__)
25 25
 class TugboatPlugin(BaseDataSourcePlugin):
26 26
     def __init__(self, region):
27 27
         LOG.info("Tugboat Initializing")
28
-        self.source_type = 'excel'
29
-        self.source_name = 'tugboat'
28
+        self.source_type = "excel"
29
+        self.source_name = "tugboat"
30 30
 
31 31
         # Configuration parameters
32 32
         self.excel_path = None
@@ -52,8 +52,8 @@ class TugboatPlugin(BaseDataSourcePlugin):
52 52
 
53 53
         Each plugin will have their own config opts.
54 54
         """
55
-        self.excel_path = conf['excel_path']
56
-        self.excel_spec = conf['excel_spec']
55
+        self.excel_path = conf["excel_path"]
56
+        self.excel_spec = conf["excel_spec"]
57 57
 
58 58
         # Extract raw data from excel sheets
59 59
         self._get_excel_obj()
@@ -69,18 +69,18 @@ class TugboatPlugin(BaseDataSourcePlugin):
69 69
         written as an additional safeguard.
70 70
         """
71 71
         try:
72
-            assert (len(
73
-                kwargs['excel'])), "Engineering Spec file not specified"
74
-            excel_file_info = kwargs['excel']
75
-            assert (kwargs['excel_spec']
76
-                    ) is not None, "Excel Spec file not specified"
77
-            excel_spec_info = kwargs['excel_spec']
72
+            assert len(kwargs["excel"]), "Engineering Spec file not specified"
73
+            excel_file_info = kwargs["excel"]
74
+            assert (
75
+                kwargs["excel_spec"]
76
+            ) is not None, "Excel Spec file not specified"
77
+            excel_spec_info = kwargs["excel_spec"]
78 78
         except AssertionError as e:
79 79
             LOG.error("{}:Spyglass exited!".format(e))
80 80
             exit()
81 81
         plugin_conf = {
82
-            'excel_path': excel_file_info,
83
-            'excel_spec': excel_spec_info
82
+            "excel_path": excel_file_info,
83
+            "excel_spec": excel_spec_info,
84 84
         }
85 85
         return plugin_conf
86 86
 
@@ -103,19 +103,18 @@ class TugboatPlugin(BaseDataSourcePlugin):
103 103
                  ]
104 104
         """
105 105
         LOG.info("Get Host Information")
106
-        ipmi_data = self.parsed_xl_data['ipmi_data'][0]
106
+        ipmi_data = self.parsed_xl_data["ipmi_data"][0]
107 107
         rackwise_hosts = self._get_rackwise_hosts()
108 108
         host_list = []
109 109
         for rack in rackwise_hosts.keys():
110 110
             for host in rackwise_hosts[rack]:
111
-                host_list.append({
112
-                    'rack_name':
113
-                    rack,
114
-                    'name':
115
-                    host,
116
-                    'host_profile':
117
-                    ipmi_data[host]['host_profile']
118
-                })
111
+                host_list.append(
112
+                    {
113
+                        "rack_name": rack,
114
+                        "name": host,
115
+                        "host_profile": ipmi_data[host]["host_profile"],
116
+                    }
117
+                )
119 118
         return host_list
120 119
 
121 120
     def get_networks(self, region):
@@ -123,39 +122,44 @@ class TugboatPlugin(BaseDataSourcePlugin):
123 122
         vlan_list = []
124 123
         # Network data extracted from xl is formatted to have a predictable
125 124
         # data type. For e.g VlAN 45 extracted from xl is formatted as 45
126
-        vlan_pattern = r'\d+'
127
-        private_net = self.parsed_xl_data['network_data']['private']
128
-        public_net = self.parsed_xl_data['network_data']['public']
125
+        vlan_pattern = r"\d+"
126
+        private_net = self.parsed_xl_data["network_data"]["private"]
127
+        public_net = self.parsed_xl_data["network_data"]["public"]
129 128
         # Extract network information from private and public network data
130
-        for net_type, net_val in itertools.chain(private_net.items(),
131
-                                                 public_net.items()):
129
+        for net_type, net_val in itertools.chain(
130
+            private_net.items(), public_net.items()
131
+        ):
132 132
             tmp_vlan = {}
133 133
             # Ingress is a special network that has no vlan, only a subnet string
134 134
             # So treatment for ingress is different
135
-            if net_type is not 'ingress':
135
+            if net_type != "ingress":
136 136
                 # standardize the network name as net_type may be different.
137 137
                 # For e.g. instead of pxe it may be PXE or instead of calico
138 138
                 # it may be ksn. Valid network names are pxe, calico, oob, oam,
139 139
                 # overlay, storage, ingress
140
-                tmp_vlan['name'] = self._get_network_name_from_vlan_name(
141
-                    net_type)
140
+                tmp_vlan["name"] = self._get_network_name_from_vlan_name(
141
+                    net_type
142
+                )
142 143
 
143 144
                 # extract vlan tag. It was extracted from xl file as 'VlAN 45'
144 145
                 # The code below extracts the numeric data from net_val['vlan']
145
-                if net_val.get('vlan', "") is not "":
146
-                    value = re.findall(vlan_pattern, net_val['vlan'])
147
-                    tmp_vlan['vlan'] = value[0]
146
+                if net_val.get("vlan", "") != "":
147
+                    value = re.findall(vlan_pattern, net_val["vlan"])
148
+                    tmp_vlan["vlan"] = value[0]
148 149
                 else:
149
-                    tmp_vlan['vlan'] = "#CHANGE_ME"
150
+                    tmp_vlan["vlan"] = "#CHANGE_ME"
150 151
 
151
-                tmp_vlan['subnet'] = net_val.get('subnet', "#CHANGE_ME")
152
-                tmp_vlan['gateway'] = net_val.get('gateway', "#CHANGE_ME")
152
+                tmp_vlan["subnet"] = net_val.get("subnet", "#CHANGE_ME")
153
+                tmp_vlan["gateway"] = net_val.get("gateway", "#CHANGE_ME")
153 154
             else:
154
-                tmp_vlan['name'] = 'ingress'
155
-                tmp_vlan['subnet'] = net_val
155
+                tmp_vlan["name"] = "ingress"
156
+                tmp_vlan["subnet"] = net_val
156 157
             vlan_list.append(tmp_vlan)
157
-        LOG.debug("vlan list extracted from tugboat:\n{}".format(
158
-            pprint.pformat(vlan_list)))
158
+        LOG.debug(
159
+            "vlan list extracted from tugboat:\n{}".format(
160
+                pprint.pformat(vlan_list)
161
+            )
162
+        )
159 163
         return vlan_list
160 164
 
161 165
     def get_ips(self, region, host=None):
@@ -172,33 +176,34 @@ class TugboatPlugin(BaseDataSourcePlugin):
172 176
         """
173 177
 
174 178
         ip_ = {}
175
-        ipmi_data = self.parsed_xl_data['ipmi_data'][0]
179
+        ipmi_data = self.parsed_xl_data["ipmi_data"][0]
176 180
         ip_[host] = {
177
-            'oob': ipmi_data[host].get('ipmi_address', '#CHANGE_ME'),
178
-            'oam': ipmi_data[host].get('oam', '#CHANGE_ME'),
179
-            'calico': ipmi_data[host].get('calico', '#CHANGE_ME'),
180
-            'overlay': ipmi_data[host].get('overlay', '#CHANGE_ME'),
181
-            'pxe': ipmi_data[host].get('pxe', '#CHANGE_ME'),
182
-            'storage': ipmi_data[host].get('storage', '#CHANGE_ME')
181
+            "oob": ipmi_data[host].get("ipmi_address", "#CHANGE_ME"),
182
+            "oam": ipmi_data[host].get("oam", "#CHANGE_ME"),
183
+            "calico": ipmi_data[host].get("calico", "#CHANGE_ME"),
184
+            "overlay": ipmi_data[host].get("overlay", "#CHANGE_ME"),
185
+            "pxe": ipmi_data[host].get("pxe", "#CHANGE_ME"),
186
+            "storage": ipmi_data[host].get("storage", "#CHANGE_ME"),
183 187
         }
184 188
         return ip_
185 189
 
186 190
     def get_ldap_information(self, region):
187 191
         """ Extract ldap information from excel"""
188 192
 
189
-        ldap_raw_data = self.parsed_xl_data['site_info']['ldap']
193
+        ldap_raw_data = self.parsed_xl_data["site_info"]["ldap"]
190 194
         ldap_info = {}
191 195
         # raw url is 'url: ldap://example.com' so we are converting to
192 196
         # 'ldap://example.com'
193
-        url = ldap_raw_data.get('url', '#CHANGE_ME')
197
+        url = ldap_raw_data.get("url", "#CHANGE_ME")
194 198
         try:
195
-            ldap_info['url'] = url.split(' ')[1]
196
-            ldap_info['domain'] = url.split('.')[1]
199
+            ldap_info["url"] = url.split(" ")[1]
200
+            ldap_info["domain"] = url.split(".")[1]
197 201
         except IndexError as e:
198 202
             LOG.error("url.split:{}".format(e))
199
-        ldap_info['common_name'] = ldap_raw_data.get('common_name',
200
-                                                     '#CHANGE_ME')
201
-        ldap_info['subdomain'] = ldap_raw_data.get('subdomain', '#CHANGE_ME')
203
+        ldap_info["common_name"] = ldap_raw_data.get(
204
+            "common_name", "#CHANGE_ME"
205
+        )
206
+        ldap_info["subdomain"] = ldap_raw_data.get("subdomain", "#CHANGE_ME")
202 207
 
203 208
         return ldap_info
204 209
 
@@ -206,41 +211,44 @@ class TugboatPlugin(BaseDataSourcePlugin):
206 211
         """ Returns a comma separated list of ntp ip addresses"""
207 212
 
208 213
         ntp_server_list = self._get_formatted_server_list(
209
-            self.parsed_xl_data['site_info']['ntp'])
214
+            self.parsed_xl_data["site_info"]["ntp"]
215
+        )
210 216
         return ntp_server_list
211 217
 
212 218
     def get_dns_servers(self, region):
213 219
         """ Returns a comma separated list of dns ip addresses"""
214 220
         dns_server_list = self._get_formatted_server_list(
215
-            self.parsed_xl_data['site_info']['dns'])
221
+            self.parsed_xl_data["site_info"]["dns"]
222
+        )
216 223
         return dns_server_list
217 224
 
218 225
     def get_domain_name(self, region):
219 226
         """ Returns domain name extracted from excel file"""
220 227
 
221
-        return self.parsed_xl_data['site_info']['domain']
228
+        return self.parsed_xl_data["site_info"]["domain"]
222 229
 
223 230
     def get_location_information(self, region):
224 231
         """
225 232
         Prepare location data from information extracted
226 233
         by ExcelParser(i.e raw data)
227 234
         """
228
-        location_data = self.parsed_xl_data['site_info']['location']
235
+        location_data = self.parsed_xl_data["site_info"]["location"]
229 236
 
230
-        corridor_pattern = r'\d+'
231
-        corridor_number = re.findall(corridor_pattern,
232
-                                     location_data['corridor'])[0]
233
-        name = location_data.get('name', '#CHANGE_ME')
234
-        state = location_data.get('state', '#CHANGE_ME')
235
-        country = location_data.get('country', '#CHANGE_ME')
236
-        physical_location_id = location_data.get('physical_location', '')
237
+        corridor_pattern = r"\d+"
238
+        corridor_number = re.findall(
239
+            corridor_pattern, location_data["corridor"]
240
+        )[0]
241
+        name = location_data.get("name", "#CHANGE_ME")
242
+        state = location_data.get("state", "#CHANGE_ME")
243
+        country = location_data.get("country", "#CHANGE_ME")
244
+        physical_location_id = location_data.get("physical_location", "")
237 245
 
238 246
         return {
239
-            'name': name,
240
-            'physical_location_id': physical_location_id,
241
-            'state': state,
242
-            'country': country,
243
-            'corridor': 'c{}'.format(corridor_number),
247
+            "name": name,
248
+            "physical_location_id": physical_location_id,
249
+            "state": state,
250
+            "country": country,
251
+            "corridor": "c{}".format(corridor_number),
244 252
         }
245 253
 
246 254
     def get_racks(self, region):
@@ -277,29 +285,35 @@ class TugboatPlugin(BaseDataSourcePlugin):
277 285
             vlan_name contains "pxe" the network name is "pxe"
278 286
         """
279 287
         network_names = [
280
-            'ksn|calico', 'storage', 'oam|server', 'ovs|overlay', 'oob', 'pxe'
288
+            "ksn|calico",
289
+            "storage",
290
+            "oam|server",
291
+            "ovs|overlay",
292
+            "oob",
293
+            "pxe",
281 294
         ]
282 295
         for name in network_names:
283 296
             # Make a pattern that would ignore case.
284 297
             # if name is 'ksn' pattern name is '(?i)(ksn)'
285 298
             name_pattern = "(?i)({})".format(name)
286 299
             if re.search(name_pattern, vlan_name):
287
-                if name is 'ksn|calico':
288
-                    return 'calico'
289
-                if name is 'storage':
290
-                    return 'storage'
291
-                if name is 'oam|server':
292
-                    return 'oam'
293
-                if name is 'ovs|overlay':
294
-                    return 'overlay'
295
-                if name is 'oob':
296
-                    return 'oob'
297
-                if name is 'pxe':
298
-                    return 'pxe'
300
+                if name == "ksn|calico":
301
+                    return "calico"
302
+                if name == "storage":
303
+                    return "storage"
304
+                if name == "oam|server":
305
+                    return "oam"
306
+                if name == "ovs|overlay":
307
+                    return "overlay"
308
+                if name == "oob":
309
+                    return "oob"
310
+                if name == "pxe":
311
+                    return "pxe"
299 312
         # if nothing matches
300 313
         LOG.error(
301
-            "Unable to recognize VLAN name extracted from Plugin data source")
302
-        return ("")
314
+            "Unable to recognize VLAN name extracted from Plugin data source"
315
+        )
316
+        return ""
303 317
 
304 318
     def _get_formatted_server_list(self, server_list):
305 319
         """ Format dns and ntp server list as comma separated string """
@@ -309,9 +323,9 @@ class TugboatPlugin(BaseDataSourcePlugin):
309 323
         # The function returns a list of comma separated dns ip addresses
310 324
         servers = []
311 325
         for data in server_list:
312
-            if '(' not in data:
326
+            if "(" not in data:
313 327
                 servers.append(data)
314
-        formatted_server_list = ','.join(servers)
328
+        formatted_server_list = ",".join(servers)
315 329
         return formatted_server_list
316 330
 
317 331
     def _get_rack(self, host):
@@ -319,7 +333,7 @@ class TugboatPlugin(BaseDataSourcePlugin):
319 333
         Get rack id  from the rack string extracted
320 334
         from xl
321 335
         """
322
-        rack_pattern = r'\w.*(r\d+)\w.*'
336
+        rack_pattern = r"\w.*(r\d+)\w.*"
323 337
         rack = re.findall(rack_pattern, host)[0]
324 338
         if not self.region:
325 339
             self.region = host.split(rack)[0]
@@ -328,7 +342,7 @@ class TugboatPlugin(BaseDataSourcePlugin):
328 342
     def _get_rackwise_hosts(self):
329 343
         """ Mapping hosts with rack ids """
330 344
         rackwise_hosts = {}
331
-        hostnames = self.parsed_xl_data['ipmi_data'][1]
345
+        hostnames = self.parsed_xl_data["ipmi_data"][1]
332 346
         racks = self._get_rack_data()
333 347
         for rack in racks:
334 348
             if rack not in rackwise_hosts:
@@ -343,8 +357,8 @@ class TugboatPlugin(BaseDataSourcePlugin):
343 357
         """ Format rack name """
344 358
         LOG.info("Getting rack data")
345 359
         racks = {}
346
-        hostnames = self.parsed_xl_data['ipmi_data'][1]
360
+        hostnames = self.parsed_xl_data["ipmi_data"][1]
347 361
         for host in hostnames:
348 362
             rack = self._get_rack(host)
349
-            racks[rack] = rack.replace('r', 'rack')
363
+            racks[rack] = rack.replace("r", "rack")
350 364
         return racks
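Beyond the quote-style churn, the hunks above also replace identity checks on strings (net_type is not 'ingress', name is 'ksn|calico', net_val.get('vlan', "") is not "") with value comparisons. "is" tests whether two names refer to the same object, so those branches only appeared to work when CPython happened to intern the literals. A minimal sketch of the difference, with an illustrative variable name:

    # "is" compares object identity; "==" compares value, which is what the
    # plugin actually needs when classifying network and VLAN names.
    net_type = "".join(["in", "gress"])   # value "ingress", but a fresh object

    print(net_type == "ingress")   # True  -> value equality
    print(net_type is "ingress")   # False, plus a SyntaxWarning on Python 3.8+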

+ 126
- 93
spyglass/parser/engine.py View File

@@ -28,7 +28,7 @@ import yaml
28 28
 LOG = logging.getLogger(__name__)
29 29
 
30 30
 
31
-class ProcessDataSource():
31
+class ProcessDataSource:
32 32
     def __init__(self, sitetype):
33 33
         # Initialize intermediary and save site type
34 34
         self._initialize_intermediary()
@@ -36,18 +36,18 @@ class ProcessDataSource():
36 36
 
37 37
     @staticmethod
38 38
     def _read_file(file_name):
39
-        with open(file_name, 'r') as f:
39
+        with open(file_name, "r") as f:
40 40
             raw_data = f.read()
41 41
         return raw_data
42 42
 
43 43
     def _initialize_intermediary(self):
44 44
         self.host_type = {}
45 45
         self.data = {
46
-            'network': {},
47
-            'baremetal': {},
48
-            'region_name': '',
49
-            'storage': {},
50
-            'site_info': {},
46
+            "network": {},
47
+            "baremetal": {},
48
+            "region_name": "",
49
+            "storage": {},
50
+            "site_info": {},
51 51
         }
52 52
         self.sitetype = None
53 53
         self.genesis_node = None
@@ -62,37 +62,43 @@ class ProcessDataSource():
62 62
         we assign only the first subnet """
63 63
         LOG.info("Extracting network subnets")
64 64
         network_subnets = {}
65
-        for net_type in self.data['network']['vlan_network_data']:
65
+        for net_type in self.data["network"]["vlan_network_data"]:
66 66
             # One of the types is ingress and we don't want that here
67
-            if (net_type != 'ingress'):
67
+            if net_type != "ingress":
68 68
                 network_subnets[net_type] = netaddr.IPNetwork(
69
-                    self.data['network']['vlan_network_data'][net_type]
70
-                    ['subnet'][0])
71
-
72
-        LOG.debug("Network subnets:\n{}".format(
73
-            pprint.pformat(network_subnets)))
69
+                    self.data["network"]["vlan_network_data"][net_type][
70
+                        "subnet"
71
+                    ][0]
72
+                )
73
+
74
+        LOG.debug(
75
+            "Network subnets:\n{}".format(pprint.pformat(network_subnets))
76
+        )
74 77
         return network_subnets
75 78
 
76 79
     def _get_genesis_node_details(self):
77 80
         # Get genesis host node details from the hosts based on host type
78
-        for racks in self.data['baremetal'].keys():
79
-            rack_hosts = self.data['baremetal'][racks]
81
+        for racks in self.data["baremetal"].keys():
82
+            rack_hosts = self.data["baremetal"][racks]
80 83
             for host in rack_hosts:
81
-                if rack_hosts[host]['type'] == 'genesis':
84
+                if rack_hosts[host]["type"] == "genesis":
82 85
                     self.genesis_node = rack_hosts[host]
83
-                    self.genesis_node['name'] = host
84
-        LOG.debug("Genesis Node Details:\n{}".format(
85
-            pprint.pformat(self.genesis_node)))
86
+                    self.genesis_node["name"] = host
87
+        LOG.debug(
88
+            "Genesis Node Details:\n{}".format(
89
+                pprint.pformat(self.genesis_node)
90
+            )
91
+        )
86 92
 
87 93
     def _get_genesis_node_ip(self):
88 94
         """ Returns the genesis node ip """
89
-        ip = '0.0.0.0'
95
+        ip = "0.0.0.0"
90 96
         LOG.info("Getting Genesis Node IP")
91 97
         if not self.genesis_node:
92 98
             self._get_genesis_node_details()
93
-        ips = self.genesis_node.get('ip', '')
99
+        ips = self.genesis_node.get("ip", "")
94 100
         if ips:
95
-            ip = ips.get('oam', '0.0.0.0')
101
+            ip = ips.get("oam", "0.0.0.0")
96 102
         return ip
97 103
 
98 104
     def _validate_intermediary_data(self, data):
@@ -103,21 +109,21 @@ class ProcessDataSource():
103 109
         The method validates this with regex pattern defined for each
104 110
         data type.
105 111
         """
106
-        LOG.info('Validating Intermediary data')
112
+        LOG.info("Validating Intermediary data")
107 113
         temp_data = {}
108 114
         # Performing a deep copy
109 115
         temp_data = copy.deepcopy(data)
110 116
         # Converting baremetal dict to list.
111 117
         baremetal_list = []
112
-        for rack in temp_data['baremetal'].keys():
113
-            temp = [{k: v} for k, v in temp_data['baremetal'][rack].items()]
118
+        for rack in temp_data["baremetal"].keys():
119
+            temp = [{k: v} for k, v in temp_data["baremetal"][rack].items()]
114 120
             baremetal_list = baremetal_list + temp
115 121
 
116
-        temp_data['baremetal'] = baremetal_list
117
-        schema_dir = pkg_resources.resource_filename('spyglass', 'schemas/')
122
+        temp_data["baremetal"] = baremetal_list
123
+        schema_dir = pkg_resources.resource_filename("spyglass", "schemas/")
118 124
         schema_file = schema_dir + "data_schema.json"
119 125
         json_data = json.loads(json.dumps(temp_data))
120
-        with open(schema_file, 'r') as f:
126
+        with open(schema_file, "r") as f:
121 127
             json_schema = json.load(f)
122 128
         try:
123 129
             # Suppressing writing of data2.json. Can use it for debugging
@@ -152,14 +158,14 @@ class ProcessDataSource():
152 158
         based on rule name and applies them to appropriate data objects.
153 159
         """
154 160
         LOG.info("Apply design rules")
155
-        rules_dir = pkg_resources.resource_filename('spyglass', 'config/')
156
-        rules_file = rules_dir + 'rules.yaml'
161
+        rules_dir = pkg_resources.resource_filename("spyglass", "config/")
162
+        rules_file = rules_dir + "rules.yaml"
157 163
         rules_data_raw = self._read_file(rules_file)
158 164
         rules_yaml = yaml.safe_load(rules_data_raw)
159 165
         rules_data = {}
160 166
         rules_data.update(rules_yaml)
161 167
         for rule in rules_data.keys():
162
-            rule_name = rules_data[rule]['name']
168
+            rule_name = rules_data[rule]["name"]
163 169
             function_str = "_apply_rule_" + rule_name
164 170
             rule_data_name = rules_data[rule][rule_name]
165 171
             function = getattr(self, function_str)
@@ -182,23 +188,25 @@ class ProcessDataSource():
182 188
         compute or controller based on host_profile. For defining 'genesis'
183 189
         the first controller host is defined as genesis."""
184 190
         is_genesis = False
185
-        hardware_profile = rule_data[self.data['site_info']['sitetype']]
191
+        hardware_profile = rule_data[self.data["site_info"]["sitetype"]]
186 192
         # Getting individual racks. The racks are sorted to ensure that the
187 193
         # first controller of the first rack is assigned as 'genesis' node.
188
-        for rack in sorted(self.data['baremetal'].keys()):
194
+        for rack in sorted(self.data["baremetal"].keys()):
189 195
             # Getting individual hosts in each rack. Sorting of the hosts are
190 196
             # done to determine the genesis node.
191
-            for host in sorted(self.data['baremetal'][rack].keys()):
192
-                host_info = self.data['baremetal'][rack][host]
193
-                if (host_info['host_profile'] == hardware_profile[
194
-                        'profile_name']['ctrl']):
197
+            for host in sorted(self.data["baremetal"][rack].keys()):
198
+                host_info = self.data["baremetal"][rack][host]
199
+                if (
200
+                    host_info["host_profile"]
201
+                    == hardware_profile["profile_name"]["ctrl"]
202
+                ):
195 203
                     if not is_genesis:
196
-                        host_info['type'] = 'genesis'
204
+                        host_info["type"] = "genesis"
197 205
                         is_genesis = True
198 206
                     else:
199
-                        host_info['type'] = 'controller'
207
+                        host_info["type"] = "controller"
200 208
                 else:
201
-                    host_info['type'] = 'compute'
209
+                    host_info["type"] = "compute"
202 210
 
203 211
     def _apply_rule_ip_alloc_offset(self, rule_data):
204 212
         """ Apply  offset rules to update baremetal host ip's and vlan network
@@ -219,21 +227,24 @@ class ProcessDataSource():
219 227
         If a particular ip exists it is overridden."""
220 228
 
221 229
         # Get default ip offset
222
-        default_ip_offset = rule_data['default']
230
+        default_ip_offset = rule_data["default"]
223 231
 
224 232
         host_idx = 0
225 233
         LOG.info("Update baremetal host ip's")
226
-        for racks in self.data['baremetal'].keys():
227
-            rack_hosts = self.data['baremetal'][racks]
234
+        for racks in self.data["baremetal"].keys():
235
+            rack_hosts = self.data["baremetal"][racks]
228 236
             for host in rack_hosts:
229
-                host_networks = rack_hosts[host]['ip']
237
+                host_networks = rack_hosts[host]["ip"]
230 238
                 for net in host_networks:
231 239
                     ips = list(self.network_subnets[net])
232 240
                     host_networks[net] = str(ips[host_idx + default_ip_offset])
233 241
                 host_idx = host_idx + 1
234 242
 
235
-        LOG.debug("Updated baremetal host:\n{}".format(
236
-            pprint.pformat(self.data['baremetal'])))
243
+        LOG.debug(
244
+            "Updated baremetal host:\n{}".format(
245
+                pprint.pformat(self.data["baremetal"])
246
+            )
247
+        )
237 248
 
238 249
     def _update_vlan_net_data(self, rule_data):
239 250
         """ Offset allocation rules to determine ip address range(s)
@@ -245,31 +256,37 @@ class ProcessDataSource():
245 256
         LOG.info("Apply network design rules")
246 257
 
247 258
         # Collect Rules
248
-        default_ip_offset = rule_data['default']
249
-        oob_ip_offset = rule_data['oob']
250
-        gateway_ip_offset = rule_data['gateway']
251
-        ingress_vip_offset = rule_data['ingress_vip']
259
+        default_ip_offset = rule_data["default"]
260
+        oob_ip_offset = rule_data["oob"]
261
+        gateway_ip_offset = rule_data["gateway"]
262
+        ingress_vip_offset = rule_data["ingress_vip"]
252 263
         # static_ip_end_offset for non pxe network
253
-        static_ip_end_offset = rule_data['static_ip_end']
264
+        static_ip_end_offset = rule_data["static_ip_end"]
254 265
         # dhcp_ip_end_offset for pxe network
255
-        dhcp_ip_end_offset = rule_data['dhcp_ip_end']
266
+        dhcp_ip_end_offset = rule_data["dhcp_ip_end"]
256 267
 
257 268
         # Set ingress vip and CIDR for bgp
258 269
         LOG.info("Apply network design rules:bgp")
259 270
         subnet = netaddr.IPNetwork(
260
-            self.data['network']['vlan_network_data']['ingress']['subnet'][0])
271
+            self.data["network"]["vlan_network_data"]["ingress"]["subnet"][0]
272
+        )
261 273
         ips = list(subnet)
262
-        self.data['network']['bgp']['ingress_vip'] = str(
263
-            ips[ingress_vip_offset])
264
-        self.data['network']['bgp']['public_service_cidr'] = self.data[
265
-            'network']['vlan_network_data']['ingress']['subnet'][0]
266
-        LOG.debug("Updated network bgp data:\n{}".format(
267
-            pprint.pformat(self.data['network']['bgp'])))
274
+        self.data["network"]["bgp"]["ingress_vip"] = str(
275
+            ips[ingress_vip_offset]
276
+        )
277
+        self.data["network"]["bgp"]["public_service_cidr"] = self.data[
278
+            "network"
279
+        ]["vlan_network_data"]["ingress"]["subnet"][0]
280
+        LOG.debug(
281
+            "Updated network bgp data:\n{}".format(
282
+                pprint.pformat(self.data["network"]["bgp"])
283
+            )
284
+        )
268 285
 
269 286
         LOG.info("Apply network design rules:vlan")
270 287
         # Apply rules to vlan networks
271 288
         for net_type in self.network_subnets:
272
-            if net_type == 'oob':
289
+            if net_type == "oob":
273 290
                 ip_offset = oob_ip_offset
274 291
             else:
275 292
                 ip_offset = default_ip_offset
@@ -277,49 +294,60 @@ class ProcessDataSource():
277 294
             subnet = self.network_subnets[net_type]
278 295
             ips = list(subnet)
279 296
 
280
-            self.data['network']['vlan_network_data'][net_type][
281
-                'gateway'] = str(ips[gateway_ip_offset])
297
+            self.data["network"]["vlan_network_data"][net_type][
298
+                "gateway"
299
+            ] = str(ips[gateway_ip_offset])
282 300
 
283
-            self.data['network']['vlan_network_data'][net_type][
284
-                'reserved_start'] = str(ips[1])
285
-            self.data['network']['vlan_network_data'][net_type][
286
-                'reserved_end'] = str(ips[ip_offset])
301
+            self.data["network"]["vlan_network_data"][net_type][
302
+                "reserved_start"
303
+            ] = str(ips[1])
304
+            self.data["network"]["vlan_network_data"][net_type][
305
+                "reserved_end"
306
+            ] = str(ips[ip_offset])
287 307
 
288 308
             static_start = str(ips[ip_offset + 1])
289 309
             static_end = str(ips[static_ip_end_offset])
290 310
 
291
-            if net_type == 'pxe':
311
+            if net_type == "pxe":
292 312
                 mid = len(ips) // 2
293 313
                 static_end = str(ips[mid - 1])
294 314
                 dhcp_start = str(ips[mid])
295 315
                 dhcp_end = str(ips[dhcp_ip_end_offset])
296 316
 
297
-                self.data['network']['vlan_network_data'][net_type][
298
-                    'dhcp_start'] = dhcp_start
299
-                self.data['network']['vlan_network_data'][net_type][
300
-                    'dhcp_end'] = dhcp_end
317
+                self.data["network"]["vlan_network_data"][net_type][
318
+                    "dhcp_start"
319
+                ] = dhcp_start
320
+                self.data["network"]["vlan_network_data"][net_type][
321
+                    "dhcp_end"
322
+                ] = dhcp_end
301 323
 
302
-            self.data['network']['vlan_network_data'][net_type][
303
-                'static_start'] = static_start
304
-            self.data['network']['vlan_network_data'][net_type][
305
-                'static_end'] = static_end
324
+            self.data["network"]["vlan_network_data"][net_type][
325
+                "static_start"
326
+            ] = static_start
327
+            self.data["network"]["vlan_network_data"][net_type][
328
+                "static_end"
329
+            ] = static_end
306 330
 
307 331
             # There is no vlan for oob network
308
-            if (net_type != 'oob'):
309
-                self.data['network']['vlan_network_data'][net_type][
310
-                    'vlan'] = self.data['network']['vlan_network_data'][
311
-                        net_type]['vlan']
332
+            if net_type != "oob":
333
+                self.data["network"]["vlan_network_data"][net_type][
334
+                    "vlan"
335
+                ] = self.data["network"]["vlan_network_data"][net_type]["vlan"]
312 336
 
313 337
             # OAM have default routes. Only for cruiser. TBD
314
-            if (net_type == 'oam'):
338
+            if net_type == "oam":
315 339
                 routes = ["0.0.0.0/0"]
316 340
             else:
317 341
                 routes = []
318
-            self.data['network']['vlan_network_data'][net_type][
319
-                'routes'] = routes
342
+            self.data["network"]["vlan_network_data"][net_type][
343
+                "routes"
344
+            ] = routes
320 345
 
321
-        LOG.debug("Updated vlan network data:\n{}".format(
322
-            pprint.pformat(self.data['network']['vlan_network_data'])))
346
+        LOG.debug(
347
+            "Updated vlan network data:\n{}".format(
348
+                pprint.pformat(self.data["network"]["vlan_network_data"])
349
+            )
350
+        )
323 351
 
324 352
     def load_extracted_data_from_data_source(self, extracted_data):
325 353
         """
@@ -334,8 +362,11 @@ class ProcessDataSource():
334 362
 
335 363
         LOG.info("Loading plugin data source")
336 364
         self.data = extracted_data
337
-        LOG.debug("Extracted data from plugin:\n{}".format(
338
-            pprint.pformat(extracted_data)))
365
+        LOG.debug(
366
+            "Extracted data from plugin:\n{}".format(
367
+                pprint.pformat(extracted_data)
368
+            )
369
+        )
339 370
         # Uncomment the following segment for debugging purposes.
340 371
         # extracted_file = "extracted_file.yaml"
341 372
         # yaml_file = yaml.dump(extracted_data, default_flow_style=False)
@@ -344,13 +375,14 @@ class ProcessDataSource():
344 375
         # f.close()
345 376
 
346 377
         # Append region_data supplied from CLI to self.data
347
-        self.data['region_name'] = self.region_name
378
+        self.data["region_name"] = self.region_name
348 379
 
349 380
     def dump_intermediary_file(self, intermediary_dir):
350 381
         """ Writing intermediary yaml """
351 382
         LOG.info("Writing intermediary yaml")
352 383
         intermediary_file = "{}_intermediary.yaml".format(
353
-            self.data['region_name'])
384
+            self.data["region_name"]
385
+        )
354 386
         # Check of if output dir = intermediary_dir exists
355 387
         if intermediary_dir is not None:
356 388
             outfile = "{}/{}".format(intermediary_dir, intermediary_file)
@@ -358,7 +390,7 @@ class ProcessDataSource():
358 390
             outfile = intermediary_file
359 391
         LOG.info("Intermediary file:{}".format(outfile))
360 392
         yaml_file = yaml.dump(self.data, default_flow_style=False)
361
-        with open(outfile, 'w') as f:
393
+        with open(outfile, "w") as f:
362 394
             f.write(yaml_file)
363 395
         f.close()
364 396
 
@@ -379,10 +411,11 @@ class ProcessDataSource():
379 411
     def edit_intermediary_yaml(self):
380 412
         """ Edit generated data using on browser """
381 413
         LOG.info(
382
-            "edit_intermediary_yaml: Invoking web server for yaml editing")
383
-        with tempfile.NamedTemporaryFile(mode='r+') as file_obj:
414
+            "edit_intermediary_yaml: Invoking web server for yaml editing"
415
+        )
416
+        with tempfile.NamedTemporaryFile(mode="r+") as file_obj:
384 417
             yaml.safe_dump(self.data, file_obj, default_flow_style=False)
385 418
             host = self._get_genesis_node_ip()
386
-            os.system('yaml-editor -f {0} -h {1}'.format(file_obj.name, host))
419
+            os.system("yaml-editor -f {0} -h {1}".format(file_obj.name, host))
387 420
             file_obj.seek(0)
388 421
             self.data = yaml.safe_load(file_obj)
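The baremetal-IP hunk and _update_vlan_net_data above derive gateways, reserved ranges and static/DHCP ranges by indexing into the expanded address list of a netaddr.IPNetwork. A minimal sketch of that offset arithmetic follows; the subnet and offsets here are invented for illustration (the real offsets are read from spyglass/config/rules.yaml):

    import netaddr

    # Illustrative values only; Spyglass reads the real offsets from rules.yaml.
    default_ip_offset = 10
    gateway_ip_offset = 1

    subnet = netaddr.IPNetwork("10.0.100.0/24")
    ips = list(subnet)                              # 10.0.100.0 ... 10.0.100.255

    gateway = str(ips[gateway_ip_offset])           # "10.0.100.1"
    reserved_start = str(ips[1])                    # "10.0.100.1"
    reserved_end = str(ips[default_ip_offset])      # "10.0.100.10"
    static_start = str(ips[default_ip_offset + 1])  # "10.0.100.11"

    print(gateway, reserved_start, reserved_end, static_start)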

+ 13
- 16
spyglass/site_processors/base.py View File

@@ -22,23 +22,20 @@ class BaseProcessor:
22 22
 
23 23
     @staticmethod
24 24
     def get_role_wise_nodes(yaml_data):
25
-        hosts = {
26
-            'genesis': {},
27
-            'masters': [],
28
-            'workers': [],
29
-        }
25
+        hosts = {"genesis": {}, "masters": [], "workers": []}
30 26
 
31
-        for rack in yaml_data['baremetal']:
32
-            for host in yaml_data['baremetal'][rack]:
33
-                if yaml_data['baremetal'][rack][host]['type'] == 'genesis':
34
-                    hosts['genesis'] = {
35
-                        'name': host,
36
-                        'pxe': yaml_data['baremetal'][rack][host]['ip']['pxe'],
37
-                        'oam': yaml_data['baremetal'][rack][host]['ip']['oam'],
27
+        for rack in yaml_data["baremetal"]:
28
+            for host in yaml_data["baremetal"][rack]:
29
+                if yaml_data["baremetal"][rack][host]["type"] == "genesis":
30
+                    hosts["genesis"] = {
31
+                        "name": host,
32
+                        "pxe": yaml_data["baremetal"][rack][host]["ip"]["pxe"],
33
+                        "oam": yaml_data["baremetal"][rack][host]["ip"]["oam"],
38 34
                     }
39
-                elif yaml_data['baremetal'][rack][host][
40
-                        'type'] == 'controller':
41
-                    hosts['masters'].append(host)
35
+                elif (
36
+                    yaml_data["baremetal"][rack][host]["type"] == "controller"
37
+                ):
38
+                    hosts["masters"].append(host)
42 39
                 else:
43
-                    hosts['workers'].append(host)
40
+                    hosts["workers"].append(host)
44 41
         return hosts
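get_role_wise_nodes above walks the intermediary's baremetal tree and buckets hosts by the type assigned earlier by the parser rules (genesis, controller, compute). A hand-written fragment of that input and the result it would produce, with invented rack, host names and IPs:

    yaml_data = {
        "baremetal": {
            "rack72": {
                "host01": {"type": "genesis",
                           "ip": {"pxe": "10.0.20.11", "oam": "10.0.30.11"}},
                "host02": {"type": "controller", "ip": {}},
                "host03": {"type": "compute", "ip": {}},
            }
        }
    }

    # BaseProcessor.get_role_wise_nodes(yaml_data) would return:
    # {
    #     "genesis": {"name": "host01", "pxe": "10.0.20.11", "oam": "10.0.30.11"},
    #     "masters": ["host02"],
    #     "workers": ["host03"],
    # }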

+ 18
- 12
spyglass/site_processors/site_processor.py View File

@@ -36,12 +36,12 @@ class SiteProcessor(BaseProcessor):
36 36
         """
37 37
         # Check of manifest_dir exists
38 38
         if self.manifest_dir is not None:
39
-            site_manifest_dir = self.manifest_dir + '/pegleg_manifests/site/'
39
+            site_manifest_dir = self.manifest_dir + "/pegleg_manifests/site/"
40 40
         else:
41
-            site_manifest_dir = 'pegleg_manifests/site/'
41
+            site_manifest_dir = "pegleg_manifests/site/"
42 42
         LOG.info("Site manifest output dir:{}".format(site_manifest_dir))
43 43
 
44
-        template_software_dir = template_dir + '/'
44
+        template_software_dir = template_dir + "/"
45 45
         template_dir_abspath = os.path.dirname(template_software_dir)
46 46
         LOG.debug("Template Path:%s", template_dir_abspath)
47 47
 
@@ -50,16 +50,19 @@ class SiteProcessor(BaseProcessor):
50 50
                 j2_env = Environment(
51 51
                     autoescape=False,
52 52
                     loader=FileSystemLoader(dirpath),
53
-                    trim_blocks=True)
53
+                    trim_blocks=True,
54
+                )
54 55
                 j2_env.filters[
55
-                    'get_role_wise_nodes'] = self.get_role_wise_nodes
56
+                    "get_role_wise_nodes"
57
+                ] = self.get_role_wise_nodes
56 58
                 templatefile = os.path.join(dirpath, filename)
57
-                outdirs = dirpath.split('templates')[1]
59
+                outdirs = dirpath.split("templates")[1]
58 60
 
59
-                outfile_path = '{}{}{}'.format(
60
-                    site_manifest_dir, self.yaml_data['region_name'], outdirs)
61
-                outfile_yaml = templatefile.split('.j2')[0].split('/')[-1]
62
-                outfile = outfile_path + '/' + outfile_yaml
61
+                outfile_path = "{}{}{}".format(
62
+                    site_manifest_dir, self.yaml_data["region_name"], outdirs
63
+                )
64
+                outfile_yaml = templatefile.split(".j2")[0].split("/")[-1]
65
+                outfile = outfile_path + "/" + outfile_yaml
63 66
                 outfile_dir = os.path.dirname(outfile)
64 67
                 if not os.path.exists(outfile_dir):
65 68
                     os.makedirs(outfile_dir)
@@ -71,7 +74,10 @@ class SiteProcessor(BaseProcessor):
71 74
                     out.close()
72 75
                 except IOError as ioe:
73 76
                     LOG.error(
74
-                        "IOError during rendering:{}".format(outfile_yaml))
77
+                        "IOError during rendering:{}".format(outfile_yaml)
78
+                    )
75 79
                     raise SystemExit(
76 80
                         "Error when generating {:s}:\n{:s}".format(
77
-                            outfile, ioe.strerror))
81
+                            outfile, ioe.strerror
82
+                        )
83
+                    )
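The renderer above builds a Jinja2 Environment per template directory, registers get_role_wise_nodes as a filter, and writes each rendered .j2 file under pegleg_manifests/site/<region_name>/. A stripped-down sketch of that filter-plus-render pattern; the in-memory template and the lambda filter are stand-ins for the real FileSystemLoader templates and BaseProcessor filter:

    from jinja2 import Environment

    # Stand-in filter: the real one is BaseProcessor.get_role_wise_nodes.
    j2_env = Environment(autoescape=False, trim_blocks=True)
    j2_env.filters["get_role_wise_nodes"] = lambda data: sorted(data["baremetal"])

    template = j2_env.from_string(
        "racks: {{ yaml_data | get_role_wise_nodes | join(', ') }}"
    )
    print(template.render(yaml_data={"baremetal": {"rack72": {}, "rack73": {}}}))
    # -> racks: rack72, rack73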

+ 94
- 67
spyglass/spyglass.py View File

@@ -22,98 +22,115 @@ import yaml
22 22
 from spyglass.parser.engine import ProcessDataSource
23 23
 from spyglass.site_processors.site_processor import SiteProcessor
24 24
 
25
-LOG = logging.getLogger('spyglass')
25
+LOG = logging.getLogger("spyglass")
26 26
 
27 27
 
28 28
 @click.command()
29 29
 @click.option(
30
-    '--site',
31
-    '-s',
32
-    help='Specify the site for which manifests to be generated')
30
+    "--site", "-s", help="Specify the site for which manifests to be generated"
31
+)
33 32
 @click.option(
34
-    '--type', '-t', help='Specify the plugin type formation or tugboat')
35
-@click.option('--formation_url', '-f', help='Specify the formation url')
36
-@click.option('--formation_user', '-u', help='Specify the formation user id')
33
+    "--type", "-t", help="Specify the plugin type formation or tugboat"
34
+)
35
+@click.option("--formation_url", "-f", help="Specify the formation url")
36
+@click.option("--formation_user", "-u", help="Specify the formation user id")
37 37
 @click.option(
38
-    '--formation_password', '-p', help='Specify the formation user password')
38
+    "--formation_password", "-p", help="Specify the formation user password"
39
+)
39 40
 @click.option(
40
-    '--intermediary',
41
-    '-i',
41
+    "--intermediary",
42
+    "-i",
42 43
     type=click.Path(exists=True),
43
-    help=
44
-    'Intermediary file path  generate manifests, use -m also with this option')
44
+    help=(
45
+        "Intermediary file path generate manifests, "
46
+        "use -m also with this option"
47
+    ),
48
+)
45 49
 @click.option(
46
-    '--additional_config',
47
-    '-d',
50
+    "--additional_config",
51
+    "-d",
48 52
     type=click.Path(exists=True),
49
-    help='Site specific configuraton details')
53
+    help="Site specific configuraton details",
54
+)
50 55
 @click.option(
51
-    '--generate_intermediary',
52
-    '-g',
56
+    "--generate_intermediary",
57
+    "-g",
53 58
     is_flag=True,
54
-    help='Dump intermediary file from passed excel and excel spec')
59
+    help="Dump intermediary file from passed excel and excel spec",
60
+)
55 61
 @click.option(
56
-    '--intermediary_dir',
57
-    '-idir',
62
+    "--intermediary_dir",
63
+    "-idir",
58 64
     type=click.Path(exists=True),
59
-    help='The path where intermediary file needs to be generated')
65
+    help="The path where intermediary file needs to be generated",
66
+)
60 67
 @click.option(
61
-    '--edit_intermediary/--no_edit_intermediary',
62
-    '-e/-nedit',
68
+    "--edit_intermediary/--no_edit_intermediary",
69
+    "-e/-nedit",
63 70
     default=True,
64
-    help='Flag to let user edit intermediary')
71
+    help="Flag to let user edit intermediary",
72
+)
65 73
 @click.option(
66
-    '--generate_manifests',
67
-    '-m',
74
+    "--generate_manifests",
75
+    "-m",
68 76
     is_flag=True,
69
-    help='Generate manifests from the generated intermediary file')
77
+    help="Generate manifests from the generated intermediary file",
78
+)
70 79
 @click.option(
71
-    '--manifest_dir',
72
-    '-mdir',
80
+    "--manifest_dir",
81
+    "-mdir",
73 82
     type=click.Path(exists=True),
74
-    help='The path where manifest files needs to be generated')
83
+    help="The path where manifest files needs to be generated",
84
+)
75 85
 @click.option(
76
-    '--template_dir',
77
-    '-tdir',
86
+    "--template_dir",
87
+    "-tdir",
78 88
     type=click.Path(exists=True),
79
-    help='The path where J2 templates are available')
89
+    help="The path where J2 templates are available",
90
+)
80 91
 @click.option(
81
-    '--excel',
82
-    '-x',
92
+    "--excel",
93
+    "-x",
83 94
     multiple=True,
84 95
     type=click.Path(exists=True),
85
-    help=
86
-    'Path to engineering excel file, to be passed with generate_intermediary')
96
+    help=(
97
+        "Path to engineering excel file, to be passed with "
98
+        "generate_intermediary"
99
+    ),
100
+)
87 101
 @click.option(
88
-    '--excel_spec',
89
-    '-e',
102
+    "--excel_spec",
103
+    "-e",
90 104
     type=click.Path(exists=True),
91
-    help='Path to excel spec, to be passed with generate_intermediary')
105
+    help="Path to excel spec, to be passed with generate_intermediary",
106
+)
92 107
 @click.option(
93
-    '--loglevel',
94
-    '-l',
108
+    "--loglevel",
109
+    "-l",
95 110
     default=20,
96 111
     multiple=False,
97 112
     show_default=True,
98
-    help='Loglevel NOTSET:0 ,DEBUG:10, \
99
-    INFO:20, WARNING:30, ERROR:40, CRITICAL:50')
113
+    help="Loglevel NOTSET:0 ,DEBUG:10, \
114
+    INFO:20, WARNING:30, ERROR:40, CRITICAL:50",
115
+)
100 116
 def main(*args, **kwargs):
101 117
     # Extract user provided inputs
102
-    generate_intermediary = kwargs['generate_intermediary']
103
-    intermediary_dir = kwargs['intermediary_dir']
104
-    edit_intermediary = kwargs['edit_intermediary']
105
-    generate_manifests = kwargs['generate_manifests']
106
-    manifest_dir = kwargs['manifest_dir']
107
-    intermediary = kwargs['intermediary']
108
-    site = kwargs['site']
109
-    template_dir = kwargs['template_dir']
110
-    loglevel = kwargs['loglevel']
118
+    generate_intermediary = kwargs["generate_intermediary"]
119
+    intermediary_dir = kwargs["intermediary_dir"]
120
+    edit_intermediary = kwargs["edit_intermediary"]
121
+    generate_manifests = kwargs["generate_manifests"]
122
+    manifest_dir = kwargs["manifest_dir"]
123
+    intermediary = kwargs["intermediary"]
124
+    site = kwargs["site"]
125
+    template_dir = kwargs["template_dir"]
126
+    loglevel = kwargs["loglevel"]
111 127
 
112 128
     # Set Logging format
113 129
     LOG.setLevel(loglevel)
114 130
     stream_handle = logging.StreamHandler()
115 131
     formatter = logging.Formatter(
116
-        '(%(name)s): %(asctime)s %(levelname)s %(message)s')
132
+        "(%(name)s): %(asctime)s %(levelname)s %(message)s"
133
+    )
117 134
     stream_handle.setFormatter(formatter)
118 135
     LOG.addHandler(stream_handle)
119 136
 
@@ -139,19 +156,21 @@ def main(*args, **kwargs):
139 156
     intermediary_yaml = {}
140 157
     if intermediary is None:
141 158
         LOG.info("Generating Intermediary yaml")
142
-        plugin_type = kwargs.get('type', None)
159
+        plugin_type = kwargs.get("type", None)
143 160
         plugin_class = None
144 161
 
145 162
         # Discover the plugin and load the plugin class
146 163
         LOG.info("Load the plugin class")
147 164
         for entry_point in pkg_resources.iter_entry_points(
148
-                'data_extractor_plugins'):
165
+            "data_extractor_plugins"
166
+        ):
149 167
             if entry_point.name == plugin_type:
150 168
                 plugin_class = entry_point.load()
151 169
 
152 170
         if plugin_class is None:
153 171
             LOG.error(
154
-                "Unsupported Plugin type. Plugin type:{}".format(plugin_type))
172
+                "Unsupported Plugin type. Plugin type:{}".format(plugin_type)
173
+            )
155 174
             exit()
156 175
 
157 176
         # Extract data from plugin data source
@@ -162,16 +181,22 @@ def main(*args, **kwargs):
162 181
         data_extractor.extract_data()
163 182
 
164 183
         # Apply any additional_config provided by user
165
-        additional_config = kwargs.get('additional_config', None)
184
+        additional_config = kwargs.get("additional_config", None)
166 185
         if additional_config is not None:
167
-            with open(additional_config, 'r') as config:
186
+            with open(additional_config, "r") as config:
168 187
                 raw_data = config.read()
169 188
                 additional_config_data = yaml.safe_load(raw_data)
170
-            LOG.debug("Additional config data:\n{}".format(
171
-                pprint.pformat(additional_config_data)))
189
+            LOG.debug(
190
+                "Additional config data:\n{}".format(
191
+                    pprint.pformat(additional_config_data)
192
+                )
193
+            )
172 194
 
173
-            LOG.info("Apply additional configuration from:{}".format(
174
-                additional_config))
195
+            LOG.info(
196
+                "Apply additional configuration from:{}".format(
197
+                    additional_config
198
+                )
199
+            )
175 200
             data_extractor.apply_additional_data(additional_config_data)
176 201
             LOG.debug(pprint.pformat(data_extractor.site_data))
177 202
 
@@ -179,14 +204,16 @@ def main(*args, **kwargs):
179 204
         LOG.info("Apply design rules to the extracted data")
180 205
         process_input_ob = ProcessDataSource(site)
181 206
         process_input_ob.load_extracted_data_from_data_source(
182
-            data_extractor.site_data)
207
+            data_extractor.site_data
208
+        )
183 209
 
184 210
         LOG.info("Generate intermediary yaml")
185 211
         intermediary_yaml = process_input_ob.generate_intermediary_yaml(
186
-            edit_intermediary)
212
+            edit_intermediary
213
+        )
187 214
     else:
188 215
         LOG.info("Loading intermediary from user provided input")
189
-        with open(intermediary, 'r') as intermediary_file:
216
+        with open(intermediary, "r") as intermediary_file:
190 217
             raw_data = intermediary_file.read()
191 218
             intermediary_yaml = yaml.safe_load(raw_data)
192 219
 
@@ -201,5 +228,5 @@ def main(*args, **kwargs):
201 228
     LOG.info("Spyglass Execution Completed")
202 229
 
203 230
 
204
-if __name__ == '__main__':
231
+if __name__ == "__main__":
205 232
     main()
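main() above discovers its data-extractor plugin by scanning the data_extractor_plugins entry-point group for the name passed via --type/-t. That lookup can be sketched in isolation as below; "tugboat" is the plugin shipped in this tree, and the helper name is illustrative:

    import pkg_resources


    def load_plugin(plugin_type):
        """Return the class registered under data_extractor_plugins, or None."""
        for entry_point in pkg_resources.iter_entry_points(
            "data_extractor_plugins"
        ):
            if entry_point.name == plugin_type:
                return entry_point.load()
        return None


    plugin_class = load_plugin("tugboat")
    if plugin_class is None:
        raise SystemExit("Unsupported plugin type: tugboat")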

+ 66
- 53
spyglass/utils/editor/editor.py View File

@@ -26,49 +26,55 @@ from flask_bootstrap import Bootstrap
26 26
 
27 27
 
28 28
 app_path = os.path.dirname(os.path.abspath(__file__))
29
-app = Flask('Yaml Editor!',
30
-            template_folder=os.path.join(app_path, 'templates'),
31
-            static_folder=os.path.join(app_path, 'static'))
29
+app = Flask(
30
+    "Yaml Editor!",
31
+    template_folder=os.path.join(app_path, "templates"),
32
+    static_folder=os.path.join(app_path, "static"),
33
+)
32 34
 Bootstrap(app)
33
-logging.getLogger('werkzeug').setLevel(logging.ERROR)
35
+logging.getLogger("werkzeug").setLevel(logging.ERROR)
34 36
 LOG = app.logger
35 37
 
36 38
 
37
-@app.route('/favicon.ico')
39
+@app.route("/favicon.ico")
38 40
 def favicon():
39
-    return send_from_directory(app.static_folder, 'favicon.ico')
41
+    return send_from_directory(app.static_folder, "favicon.ico")
40 42
 
41 43
 
42
-@app.route('/', methods=['GET', 'POST'])
44
+@app.route("/", methods=["GET", "POST"])
43 45
 def index():
44 46
     """Renders index page to edit provided yaml file."""
45
-    LOG.info('Rendering yaml file for editing')
46
-    with open(app.config['YAML_FILE']) as file_obj:
47
+    LOG.info("Rendering yaml file for editing")
48
+    with open(app.config["YAML_FILE"]) as file_obj:
47 49
         data = yaml.safe_load(file_obj)
48
-    return render_template('yaml.html',
49
-                           data=json.dumps(data),
50
-                           change_str=app.config['STRING_TO_CHANGE'])
50
+    return render_template(
51
+        "yaml.html",
52
+        data=json.dumps(data),
53
+        change_str=app.config["STRING_TO_CHANGE"],
54
+    )
51 55
 
52 56
 
53
-@app.route('/save', methods=['POST'])
57
+@app.route("/save", methods=["POST"])
54 58
 def save():
55 59
     """Save current progress on file."""
56
-    LOG.info('Saving edited inputs from user to yaml file')
57
-    out = request.json.get('yaml_data')
58
-    with open(app.config['YAML_FILE'], 'w') as file_obj:
60
+    LOG.info("Saving edited inputs from user to yaml file")
61
+    out = request.json.get("yaml_data")
62
+    with open(app.config["YAML_FILE"], "w") as file_obj:
59 63
         yaml.safe_dump(out, file_obj, default_flow_style=False)
60 64
     return "Data saved successfully!"
61 65
 
62 66
 
63
-@app.route('/saveExit', methods=['POST'])
67
+@app.route("/saveExit", methods=["POST"])
64 68
 def save_exit():
65 69
     """Save current progress on file and shuts down the server."""
66
-    LOG.info('Saving edited inputs from user to yaml file and shutting'
67
-             ' down server')
68
-    out = request.json.get('yaml_data')
69
-    with open(app.config['YAML_FILE'], 'w') as file_obj:
70
+    LOG.info(
71
+        "Saving edited inputs from user to yaml file and shutting"
72
+        " down server"
73
+    )
74
+    out = request.json.get("yaml_data")
75
+    with open(app.config["YAML_FILE"], "w") as file_obj:
70 76
         yaml.safe_dump(out, file_obj, default_flow_style=False)
71
-    func = request.environ.get('werkzeug.server.shutdown')
77
+    func = request.environ.get("werkzeug.server.shutdown")
72 78
     if func:
73 79
         func()
74 80
     return "Saved successfully, Shutting down app! You may close the tab!"
@@ -77,68 +83,72 @@ def save_exit():
77 83
 @app.errorhandler(404)
78 84
 def page_not_found(e):
79 85
     """Serves 404 error."""
80
-    LOG.info('User tried to access unavailable page.')
81
-    return '<h1>404: Page not Found!</h1>'
86
+    LOG.info("User tried to access unavailable page.")
87
+    return "<h1>404: Page not Found!</h1>"
82 88
 
83 89
 
84 90
 def run(*args, **kwargs):
85 91
     """Starts the server."""
86
-    LOG.info('Initiating web server for yaml editing')
87
-    port = kwargs.get('port', None)
92
+    LOG.info("Initiating web server for yaml editing")
93
+    port = kwargs.get("port", None)
88 94
     if not port:
89 95
         port = 8161
90
-    app.run(host='0.0.0.0', port=port, debug=False)
96
+    app.run(host="0.0.0.0", port=port, debug=False)
91 97
 
92 98
 
93 99
 @click.command()
94 100
 @click.option(
95
-    '--file',
96
-    '-f',
101
+    "--file",
102
+    "-f",
97 103
     required=True,
98 104
     type=click.File(),
99 105
     multiple=False,
100
-    help="Path with file name to the intermediary yaml file."
106
+    help="Path with file name to the intermediary yaml file.",
101 107
 )
102 108
 @click.option(
103
-    '--host',
104
-    '-h',
105
-    default='0.0.0.0',
109
+    "--host",
110
+    "-h",
111
+    default="0.0.0.0",
106 112
     type=click.STRING,
107 113
     multiple=False,
108
-    help="Optional host parameter to run Flask on."
114
+    help="Optional host parameter to run Flask on.",
109 115
 )
110 116
 @click.option(
111
-    '--port',
112
-    '-p',
117
+    "--port",
118
+    "-p",
113 119
     default=8161,
114 120
     type=click.INT,
115 121
     multiple=False,
116
-    help="Optional port parameter to run Flask on."
122
+    help="Optional port parameter to run Flask on.",
117 123
 )
118 124
 @click.option(
119
-    '--string',
120
-    '-s',
121
-    default='#CHANGE_ME',
125
+    "--string",
126
+    "-s",
127
+    default="#CHANGE_ME",
122 128
     type=click.STRING,
123 129
     multiple=False,
124
-    help="Text which is required to be changed on yaml file."
130
+    help="Text which is required to be changed on yaml file.",
125 131
 )
126 132
 def main(*args, **kwargs):
127 133
     LOG.setLevel(logging.INFO)
128
-    LOG.info('Initiating yaml-editor')
134
+    LOG.info("Initiating yaml-editor")
129 135
     try:
130
-        yaml.safe_load(kwargs['file'])
136
+        yaml.safe_load(kwargs["file"])
131 137
     except yaml.YAMLError as e:
132
-        LOG.error('EXITTING - Please provide a valid yaml file.')
133
-        if hasattr(e, 'problem_mark'):
138
+        LOG.error("EXITTING - Please provide a valid yaml file.")
139
+        if hasattr(e, "problem_mark"):
134 140
             mark = e.problem_mark
135
-            LOG.error("Error position: ({0}:{1})".format(
136
-                mark.line + 1, mark.column + 1))
141
+            LOG.error(
142
+                "Error position: ({0}:{1})".format(
143
+                    mark.line + 1, mark.column + 1
144
+                )
145
+            )
137 146
         sys.exit(2)
138 147
     except Exception:
139
-        LOG.error('EXITTING - Please provide a valid yaml file.')
148
+        LOG.error("EXITTING - Please provide a valid yaml file.")
140 149
         sys.exit(2)
141
-    LOG.info("""
150
+    LOG.info(
151
+        """
142 152
 
143 153
 ##############################################################################
144 154
 
@@ -146,12 +156,15 @@ Please go to http://{0}:{1}/ to edit your yaml file.
146 156
 
147 157
 ##############################################################################
148 158
 
149
-    """.format(kwargs['host'], kwargs['port']))
150
-    app.config['YAML_FILE'] = kwargs['file'].name
151
-    app.config['STRING_TO_CHANGE'] = kwargs['string']
159
+    """.format(
160
+            kwargs["host"], kwargs["port"]
161
+        )
162
+    )
163
+    app.config["YAML_FILE"] = kwargs["file"].name
164
+    app.config["STRING_TO_CHANGE"] = kwargs["string"]
152 165
     run(*args, **kwargs)
153 166
 
154 167
 
155
-if __name__ == '__main__':
168
+if __name__ == "__main__":
156 169
     """Invoked when used as a script."""
157 170
     main()
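The editor above is normally launched as the yaml-editor console script (engine.py shells out to yaml-editor -f <file> -h <host>), but its Flask wiring can also be driven directly. A minimal sketch, assuming the module is importable as spyglass.utils.editor.editor and using an invented file path:

    from spyglass.utils.editor import editor

    # Point the app at an intermediary file and start serving; the path and
    # port below are illustrative only.
    editor.app.config["YAML_FILE"] = "/tmp/site_intermediary.yaml"
    editor.app.config["STRING_TO_CHANGE"] = "#CHANGE_ME"
    editor.run(port=8161)   # serves http://0.0.0.0:8161/ until /saveExit is hit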
