
Merge pull request #1 from hemanthnakkina/master

Base code for spyglass
gpsingh-1991 4 months ago
commit acd81d2b3f

.dockerignore  +4 -0

@@ -0,0 +1,4 @@
1
+**/__pycache__
2
+**/.tox
3
+**/.eggs
4
+**/spyglass.egg-info

.gitignore  +116 -0

@@ -0,0 +1,116 @@
1
+# Byte-compiled / optimized / DLL files
2
+__pycache__/
3
+*.py[cod]
4
+*$py.class
5
+
6
+# C extensions
7
+*.so
8
+
9
+# Distribution / packaging
10
+.Python
11
+env/
12
+build/
13
+develop-eggs/
14
+dist/
15
+downloads/
16
+eggs/
17
+.eggs/
18
+lib/
19
+lib64/
20
+parts/
21
+sdist/
22
+var/
23
+wheels/
24
+*.egg-info/
25
+.installed.cfg
26
+*.egg
27
+*.tgz
28
+
29
+# PyInstaller
30
+#  Usually these files are written by a python script from a template
31
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
32
+*.manifest
33
+*.spec
34
+
35
+# Installer logs
36
+pip-log.txt
37
+pip-delete-this-directory.txt
38
+
39
+# Unit test / coverage reports
40
+htmlcov/
41
+.tox/
42
+.coverage
43
+.coverage.*
44
+.cache
45
+nosetests.xml
46
+coverage.xml
47
+*.cover
48
+.hypothesis/
49
+.testrepository/*
50
+cover/*
51
+results/*
52
+.stestr/
53
+
54
+# Translations
55
+*.mo
56
+*.pot
57
+
58
+# Django stuff:
59
+*.log
60
+local_settings.py
61
+
62
+# Flask stuff:
63
+instance/
64
+.webassets-cache
65
+
66
+# Scrapy stuff:
67
+.scrapy
68
+
69
+# PyBuilder
70
+target/
71
+
72
+# Jupyter Notebook
73
+.ipynb_checkpoints
74
+
75
+# pyenv
76
+.python-version
77
+
78
+# celery beat schedule file
79
+celerybeat-schedule
80
+
81
+# SageMath parsed files
82
+*.sage.py
83
+
84
+# dotenv
85
+.env
86
+
87
+# virtualenv
88
+.venv
89
+venv/
90
+ENV/
91
+
92
+# Spyder project settings
93
+.spyderproject
94
+.spyproject
95
+
96
+# Rope project settings
97
+.ropeproject
98
+
99
+# mkdocs documentation
100
+/site
101
+
102
+# mypy
103
+.mypy_cache/
104
+
105
+# pycharm-ide
106
+.idea/
107
+
108
+# osx
109
+.DS_Store
110
+
111
+# git
112
+Changelog
113
+AUTHORS
114
+
115
+# Ansible
116
+*.retry

LICENSE  +201 -0

@@ -0,0 +1,201 @@
1
+                                 Apache License
2
+                           Version 2.0, January 2004
3
+                        http://www.apache.org/licenses/
4
+
5
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+   1. Definitions.
8
+
9
+      "License" shall mean the terms and conditions for use, reproduction,
10
+      and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+      "Licensor" shall mean the copyright owner or entity authorized by
13
+      the copyright owner that is granting the License.
14
+
15
+      "Legal Entity" shall mean the union of the acting entity and all
16
+      other entities that control, are controlled by, or are under common
17
+      control with that entity. For the purposes of this definition,
18
+      "control" means (i) the power, direct or indirect, to cause the
19
+      direction or management of such entity, whether by contract or
20
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+      outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+      "You" (or "Your") shall mean an individual or Legal Entity
24
+      exercising permissions granted by this License.
25
+
26
+      "Source" form shall mean the preferred form for making modifications,
27
+      including but not limited to software source code, documentation
28
+      source, and configuration files.
29
+
30
+      "Object" form shall mean any form resulting from mechanical
31
+      transformation or translation of a Source form, including but
32
+      not limited to compiled object code, generated documentation,
33
+      and conversions to other media types.
34
+
35
+      "Work" shall mean the work of authorship, whether in Source or
36
+      Object form, made available under the License, as indicated by a
37
+      copyright notice that is included in or attached to the work
38
+      (an example is provided in the Appendix below).
39
+
40
+      "Derivative Works" shall mean any work, whether in Source or Object
41
+      form, that is based on (or derived from) the Work and for which the
42
+      editorial revisions, annotations, elaborations, or other modifications
43
+      represent, as a whole, an original work of authorship. For the purposes
44
+      of this License, Derivative Works shall not include works that remain
45
+      separable from, or merely link (or bind by name) to the interfaces of,
46
+      the Work and Derivative Works thereof.
47
+
48
+      "Contribution" shall mean any work of authorship, including
49
+      the original version of the Work and any modifications or additions
50
+      to that Work or Derivative Works thereof, that is intentionally
51
+      submitted to Licensor for inclusion in the Work by the copyright owner
52
+      or by an individual or Legal Entity authorized to submit on behalf of
53
+      the copyright owner. For the purposes of this definition, "submitted"
54
+      means any form of electronic, verbal, or written communication sent
55
+      to the Licensor or its representatives, including but not limited to
56
+      communication on electronic mailing lists, source code control systems,
57
+      and issue tracking systems that are managed by, or on behalf of, the
58
+      Licensor for the purpose of discussing and improving the Work, but
59
+      excluding communication that is conspicuously marked or otherwise
60
+      designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+      "Contributor" shall mean Licensor and any individual or Legal Entity
63
+      on behalf of whom a Contribution has been received by Licensor and
64
+      subsequently incorporated within the Work.
65
+
66
+   2. Grant of Copyright License. Subject to the terms and conditions of
67
+      this License, each Contributor hereby grants to You a perpetual,
68
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+      copyright license to reproduce, prepare Derivative Works of,
70
+      publicly display, publicly perform, sublicense, and distribute the
71
+      Work and such Derivative Works in Source or Object form.
72
+
73
+   3. Grant of Patent License. Subject to the terms and conditions of
74
+      this License, each Contributor hereby grants to You a perpetual,
75
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+      (except as stated in this section) patent license to make, have made,
77
+      use, offer to sell, sell, import, and otherwise transfer the Work,
78
+      where such license applies only to those patent claims licensable
79
+      by such Contributor that are necessarily infringed by their
80
+      Contribution(s) alone or by combination of their Contribution(s)
81
+      with the Work to which such Contribution(s) was submitted. If You
82
+      institute patent litigation against any entity (including a
83
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+      or a Contribution incorporated within the Work constitutes direct
85
+      or contributory patent infringement, then any patent licenses
86
+      granted to You under this License for that Work shall terminate
87
+      as of the date such litigation is filed.
88
+
89
+   4. Redistribution. You may reproduce and distribute copies of the
90
+      Work or Derivative Works thereof in any medium, with or without
91
+      modifications, and in Source or Object form, provided that You
92
+      meet the following conditions:
93
+
94
+      (a) You must give any other recipients of the Work or
95
+          Derivative Works a copy of this License; and
96
+
97
+      (b) You must cause any modified files to carry prominent notices
98
+          stating that You changed the files; and
99
+
100
+      (c) You must retain, in the Source form of any Derivative Works
101
+          that You distribute, all copyright, patent, trademark, and
102
+          attribution notices from the Source form of the Work,
103
+          excluding those notices that do not pertain to any part of
104
+          the Derivative Works; and
105
+
106
+      (d) If the Work includes a "NOTICE" text file as part of its
107
+          distribution, then any Derivative Works that You distribute must
108
+          include a readable copy of the attribution notices contained
109
+          within such NOTICE file, excluding those notices that do not
110
+          pertain to any part of the Derivative Works, in at least one
111
+          of the following places: within a NOTICE text file distributed
112
+          as part of the Derivative Works; within the Source form or
113
+          documentation, if provided along with the Derivative Works; or,
114
+          within a display generated by the Derivative Works, if and
115
+          wherever such third-party notices normally appear. The contents
116
+          of the NOTICE file are for informational purposes only and
117
+          do not modify the License. You may add Your own attribution
118
+          notices within Derivative Works that You distribute, alongside
119
+          or as an addendum to the NOTICE text from the Work, provided
120
+          that such additional attribution notices cannot be construed
121
+          as modifying the License.
122
+
123
+      You may add Your own copyright statement to Your modifications and
124
+      may provide additional or different license terms and conditions
125
+      for use, reproduction, or distribution of Your modifications, or
126
+      for any such Derivative Works as a whole, provided Your use,
127
+      reproduction, and distribution of the Work otherwise complies with
128
+      the conditions stated in this License.
129
+
130
+   5. Submission of Contributions. Unless You explicitly state otherwise,
131
+      any Contribution intentionally submitted for inclusion in the Work
132
+      by You to the Licensor shall be under the terms and conditions of
133
+      this License, without any additional terms or conditions.
134
+      Notwithstanding the above, nothing herein shall supersede or modify
135
+      the terms of any separate license agreement you may have executed
136
+      with Licensor regarding such Contributions.
137
+
138
+   6. Trademarks. This License does not grant permission to use the trade
139
+      names, trademarks, service marks, or product names of the Licensor,
140
+      except as required for reasonable and customary use in describing the
141
+      origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+   7. Disclaimer of Warranty. Unless required by applicable law or
144
+      agreed to in writing, Licensor provides the Work (and each
145
+      Contributor provides its Contributions) on an "AS IS" BASIS,
146
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+      implied, including, without limitation, any warranties or conditions
148
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+      PARTICULAR PURPOSE. You are solely responsible for determining the
150
+      appropriateness of using or redistributing the Work and assume any
151
+      risks associated with Your exercise of permissions under this License.
152
+
153
+   8. Limitation of Liability. In no event and under no legal theory,
154
+      whether in tort (including negligence), contract, or otherwise,
155
+      unless required by applicable law (such as deliberate and grossly
156
+      negligent acts) or agreed to in writing, shall any Contributor be
157
+      liable to You for damages, including any direct, indirect, special,
158
+      incidental, or consequential damages of any character arising as a
159
+      result of this License or out of the use or inability to use the
160
+      Work (including but not limited to damages for loss of goodwill,
161
+      work stoppage, computer failure or malfunction, or any and all
162
+      other commercial damages or losses), even if such Contributor
163
+      has been advised of the possibility of such damages.
164
+
165
+   9. Accepting Warranty or Additional Liability. While redistributing
166
+      the Work or Derivative Works thereof, You may choose to offer,
167
+      and charge a fee for, acceptance of support, warranty, indemnity,
168
+      or other liability obligations and/or rights consistent with this
169
+      License. However, in accepting such obligations, You may act only
170
+      on Your own behalf and on Your sole responsibility, not on behalf
171
+      of any other Contributor, and only if You agree to indemnify,
172
+      defend, and hold each Contributor harmless for any liability
173
+      incurred by, or claims asserted against, such Contributor by reason
174
+      of your accepting any such warranty or additional liability.
175
+
176
+   END OF TERMS AND CONDITIONS
177
+
178
+   APPENDIX: How to apply the Apache License to your work.
179
+
180
+      To apply the Apache License to your work, attach the following
181
+      boilerplate notice, with the fields enclosed by brackets "[]"
182
+      replaced with your own identifying information. (Don't include
183
+      the brackets!)  The text should be enclosed in the appropriate
184
+      comment syntax for the file format. We also recommend that a
185
+      file or class name and description of purpose be included on the
186
+      same "printed page" as the copyright notice for easier
187
+      identification within third-party archives.
188
+
189
+   Copyright [yyyy] [name of copyright owner]
190
+
191
+   Licensed under the Apache License, Version 2.0 (the "License");
192
+   you may not use this file except in compliance with the License.
193
+   You may obtain a copy of the License at
194
+
195
+       http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+   Unless required by applicable law or agreed to in writing, software
198
+   distributed under the License is distributed on an "AS IS" BASIS,
199
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+   See the License for the specific language governing permissions and
201
+   limitations under the License.

Makefile  +84 -0

@@ -0,0 +1,84 @@
1
+# Copyright 2018 AT&T Intellectual Property.  All other rights reserved.
2
+#
3
+# Licensed under the Apache License, Version 2.0 (the "License");
4
+# you may not use this file except in compliance with the License.
5
+# You may obtain a copy of the License at
6
+#
7
+#     http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an "AS IS" BASIS,
11
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+# See the License for the specific language governing permissions and
13
+# limitations under the License.
14
+
15
+SPYGLASS_BUILD_CTX  ?= .
16
+IMAGE_NAME        ?= spyglass
17
+IMAGE_PREFIX      ?= att-comdev
18
+DOCKER_REGISTRY   ?= quay.io
19
+IMAGE_TAG         ?= latest
20
+PROXY             ?= http://proxy.foo.com:8000
21
+NO_PROXY          ?= localhost,127.0.0.1,.svc.cluster.local
22
+USE_PROXY         ?= false
23
+PUSH_IMAGE        ?= false
24
+LABEL             ?= commit-id
25
+IMAGE             ?= $(DOCKER_REGISTRY)/$(IMAGE_PREFIX)/$(IMAGE_NAME):$(IMAGE_TAG)
26
+PYTHON_BASE_IMAGE ?= python:3.6
27
+export
28
+
29
+# Build all docker images for this project
30
+.PHONY: images
31
+images: build_spyglass
32
+
33
+# Run an image locally and exercise simple tests
34
+.PHONY: run_images
35
+run_images: run_spyglass
36
+
37
+.PHONY: run_spyglass
38
+run_spyglass: build_spyglass
39
+	tools/spyglass.sh --help
40
+
41
+.PHONY: security
42
+security:
43
+	tox -c tox.ini -e bandit
44
+
45
+# Perform Linting
46
+.PHONY: lint
47
+lint: py_lint
48
+
49
+# Perform auto formatting
50
+.PHONY: format
51
+format: py_format
52
+
53
+.PHONY: build_spyglass
54
+build_spyglass:
55
+ifeq ($(USE_PROXY), true)
56
+	docker build -t $(IMAGE) --network=host --label $(LABEL) -f images/spyglass/Dockerfile \
57
+		--build-arg FROM=$(PYTHON_BASE_IMAGE) \
58
+		--build-arg http_proxy=$(PROXY) \
59
+		--build-arg https_proxy=$(PROXY) \
60
+		--build-arg HTTP_PROXY=$(PROXY) \
61
+		--build-arg HTTPS_PROXY=$(PROXY) \
62
+		--build-arg no_proxy=$(NO_PROXY) \
63
+		--build-arg NO_PROXY=$(NO_PROXY) \
64
+		--build-arg ctx_base=$(SPYGLASS_BUILD_CTX) .
65
+else
66
+	docker build -t $(IMAGE) --network=host --label $(LABEL) -f images/spyglass/Dockerfile \
67
+		--build-arg FROM=$(PYTHON_BASE_IMAGE) \
68
+		--build-arg ctx_base=$(SPYGLASS_BUILD_CTX) .
69
+endif
70
+ifeq ($(PUSH_IMAGE), true)
71
+	docker push $(IMAGE)
72
+endif
73
+
74
+.PHONY: clean
75
+clean:
76
+	rm -rf build
77
+
78
+.PHONY: py_lint
79
+py_lint:
80
+	tox -e pep8
81
+
82
+.PHONY: py_format
83
+py_format:
84
+	tox -e fmt

README.md  +29 -2

@@ -1,2 +1,29 @@
-# spyglass
-staging for the spyglass airship-spyglass repo
+
+What is Spyglass?
+-----------------
+
+Spyglass is a data extraction tool that can interface with
+different input data sources to generate site manifest YAML files.
+The data sources provide all of the configuration data needed
+for a site deployment. The site manifest YAML files generated by
+Spyglass are saved in a Git repository, from where Pegleg can
+access and aggregate them. The aggregated manifests can then be
+fed to Shipyard for site deployment or updates.
+
+Spyglass follows a plugin model to support multiple input data
+sources. The currently supported plugins are Formation and Tugboat.
+Formation is a REST API based service that provides information
+about hardware, networking, and site data; the Formation plugin
+queries the Formation API to gather the necessary configuration.
+Tugboat instead accepts an engineering spec in the form of a
+spreadsheet, together with an index file describing how to read
+it, and generates the site-level manifests from those inputs.
+As an optional step, Spyglass can generate an intermediary YAML
+file containing all the information that will be rendered into
+the Airship site manifests. This intermediary step lets the
+deployment engineer modify any data if required.
+
+Basic Usage
+-----------
+
+TODO
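
The plugin workflow described above maps onto the interfaces introduced later in this commit (spyglass/data_extractor/base.py and plugins/formation.py). The following is a minimal sketch, not part of the commit, of how a data source plugin is driven end to end; the region name and the Formation credentials are invented values, and a reachable Formation API is assumed.

    from spyglass.data_extractor.plugins.formation import FormationPlugin

    plugin = FormationPlugin('region1')       # placeholder region name
    conf = plugin.get_plugin_conf({
        'formation_url': 'https://formation.example.com',
        'formation_user': 'spyglass',
        'formation_password': 'secret',
    })
    plugin.set_config_opts(conf)              # connects to the Formation API

    site_data = plugin.extract_data()         # baremetal, network, site_info
    # Optional user-supplied overrides are merged in and take precedence
    site_data = plugin.apply_additional_data(
        {'site_info': {'sitetype': 'large'}})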

images/spyglass/Dockerfile  +13 -0

@@ -0,0 +1,13 @@
1
+ARG FROM=python:3.6
2
+FROM ${FROM}
3
+
4
+VOLUME /var/spyglass
5
+WORKDIR /var/spyglass
6
+
7
+ARG ctx_base=.
8
+
9
+COPY ${ctx_base}/requirements.txt /opt/spyglass/requirements.txt
10
+RUN pip3 install --no-cache-dir -r /opt/spyglass/requirements.txt
11
+
12
+COPY ${ctx_base} /opt/spyglass
13
+RUN pip3 install -e /opt/spyglass

requirements.txt  +7 -0

@@ -0,0 +1,7 @@
1
+jinja2==2.10
2
+jsonschema
3
+netaddr
4
+openpyxl==2.5.4
5
+pyyaml==3.12
6
+requests
7
+six

setup.py  +45 -0

@@ -0,0 +1,45 @@
1
+# Copyright 2018 AT&T Intellectual Property.  All other rights reserved.
2
+#
3
+# Licensed under the Apache License, Version 2.0 (the "License");
4
+# you may not use this file except in compliance with the License.
5
+# You may obtain a copy of the License at
6
+#
7
+#     http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an "AS IS" BASIS,
11
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+# See the License for the specific language governing permissions and
13
+# limitations under the License.
14
+
15
+from setuptools import setup
16
+from setuptools import find_packages
17
+
18
+setup(
19
+    name='spyglass',
20
+    version='0.0.1',
21
+    description='Generate Airship specific yaml manifests from data sources',
22
+    url='http://github.com/att-comdev/tugboat',
23
+    python_requires='>=3.5.0',
24
+    license='Apache 2.0',
25
+    packages=find_packages(),
26
+    install_requires=[
27
+        'jsonschema',
28
+        'Click',
29
+        'openpyxl',
30
+        'netaddr',
31
+        'pyyaml',
32
+        'jinja2',
33
+        'flask',
34
+        'flask-bootstrap',
35
+    ],
36
+    entry_points={
37
+        'console_scripts': [
38
+            'spyglass=spyglass.spyglass:main',
39
+        ],
40
+        'data_extractor_plugins':
41
+        ['formation=spyglass.data_extractor.plugins.formation:FormationPlugin',
42
+        ]
43
+    },
44
+    include_package_data=True,
45
+)
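
The 'data_extractor_plugins' entry point group registered above is how Spyglass can look up a plugin class by name at run time. A short sketch, not part of this commit, of the usual pkg_resources-based lookup for that group (the helper function and its error handling are illustrative only):

    import pkg_resources

    def load_data_extractor_plugin(name):
        # Resolve e.g. 'formation' to the class registered in setup.py
        # under the 'data_extractor_plugins' entry point group.
        for entry_point in pkg_resources.iter_entry_points(
                'data_extractor_plugins'):
            if entry_point.name == name:
                return entry_point.load()
        raise ValueError('No data extractor plugin named {}'.format(name))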

spyglass/data_extractor/__init__.py  +0 -0


spyglass/data_extractor/base.py  +450 -0

@@ -0,0 +1,450 @@
1
+# Copyright 2018 AT&T Intellectual Property.  All other rights reserved.
2
+#
3
+# Licensed under the Apache License, Version 2.0 (the 'License');
4
+# you may not use this file except in compliance with the License.
5
+# You may obtain a copy of the License at
6
+#
7
+# http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an 'AS IS' BASIS,
11
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+# See the License for the specific language governing permissions and
13
+# limitations under the License.
14
+
15
+import abc
16
+import pprint
17
+import six
18
+import logging
19
+
20
+from spyglass.utils import utils
21
+
22
+LOG = logging.getLogger(__name__)
23
+
24
+
25
+@six.add_metaclass(abc.ABCMeta)
26
+class BaseDataSourcePlugin(object):
27
+    """Provide basic hooks for data source plugins"""
28
+
29
+    def __init__(self, region):
30
+        self.source_type = None
31
+        self.source_name = None
32
+        self.region = region
33
+        self.site_data = {}
34
+
35
+    @abc.abstractmethod
36
+    def set_config_opts(self, conf):
37
+        """Placeholder to set configuration options
38
+        specific to each plugin.
39
+
40
+        :param dict conf: Configuration options as dict
41
+
42
+        Example: conf = { 'excel_spec': 'spec1.yaml',
43
+                          'excel_path': 'excel.xls' }
44
+
45
+        Each plugin will have its own config opts.
46
+        """
47
+        return
48
+
49
+    @abc.abstractmethod
50
+    def get_plugin_conf(self, kwargs):
51
+        """ Validate and return the plugin config parameters.
52
+        If validation fails, Spyglass exits.
53
+
54
+        :param dict kwargs: Spyglass CLI parameters.
55
+
56
+        :returns: plugin conf if successfully validated.
57
+
58
+        Each plugin implements its own validation mechanism.
59
+        """
60
+        return {}
61
+
62
+    @abc.abstractmethod
63
+    def get_racks(self, region):
64
+        """Return list of racks in the region
65
+
66
+        :param string region: Region name
67
+
68
+        :returns: list of rack names
69
+
70
+        :rtype: list
71
+
72
+        Example: ['rack01', 'rack02']
73
+        """
74
+        return []
75
+
76
+    @abc.abstractmethod
77
+    def get_hosts(self, region, rack):
78
+        """Return list of hosts in the region
79
+
80
+        :param string region: Region name
81
+        :param string rack: Rack name
82
+
83
+        :returns: list of hosts information
84
+
85
+        :rtype: list of dict
86
+
87
+        Example: [
88
+                     {
89
+                         'name': 'host01',
90
+                         'type': 'controller',
91
+                         'rack_name': 'rack01',
+                         'host_profile': 'hp_01'
92
+                     },
93
+                     {
94
+                         'name': 'host02',
95
+                         'type': 'compute',
96
+                         'rack_name': 'rack02',
+                         'host_profile': 'hp_02'}
97
+                 ]
98
+        """
99
+        return []
100
+
101
+    @abc.abstractmethod
102
+    def get_networks(self, region):
103
+        """Return list of networks in the region
104
+
105
+        :param string region: Region name
106
+
107
+        :returns: list of networks and their vlans
108
+
109
+        :rtype: list of dict
110
+
111
+        Example: [
112
+                     {
113
+                         'name': 'oob',
114
+                         'vlan': '41',
115
+                         'subnet': '192.168.1.0/24',
116
+                         'gateway': '192.168.1.1'
117
+                     },
118
+                     {
119
+                         'name': 'pxe',
120
+                         'vlan': '42',
121
+                         'subnet': '192.168.2.0/24',
122
+                         'gateway': '192.168.2.1'
123
+                     },
124
+                     {
125
+                         'name': 'oam',
126
+                         'vlan': '43',
127
+                         'subnet': '192.168.3.0/24',
128
+                         'gateway': '192.168.3.1'
129
+                     },
130
+                     {
131
+                         'name': 'ksn',
132
+                         'vlan': '44',
133
+                         'subnet': '192.168.4.0/24',
134
+                         'gateway': '192.168.4.1'
135
+                     },
136
+                     {
137
+                         'name': 'storage',
138
+                         'vlan': '45',
139
+                         'subnet': '192.168.5.0/24',
140
+                         'gateway': '192.168.5.1'
141
+                     },
142
+                     {
143
+                         'name': 'overlay',
144
+                         'vlan': '45',
145
+                         'subnet': '192.168.6.0/24',
146
+                         'gateway': '192.168.6.1'
147
+                     }
148
+                 ]
149
+        """
150
+
151
+        # TODO(nh863p): Expand the return type if they are rack level subnets
152
+        # TODO(nh863p): Can ingress information be provided here?
153
+        return []
154
+
155
+    @abc.abstractmethod
156
+    def get_ips(self, region, host):
157
+        """Return list of IPs on the host
158
+
159
+        :param string region: Region name
160
+        :param string host: Host name
161
+
162
+        :returns: Dict of IPs per network on the host
163
+
164
+        :rtype: dict
165
+
166
+        Example: {'oob': {'ipv4': '192.168.1.10'},
167
+                  'pxe': {'ipv4': '192.168.2.10'}}
168
+
169
+        Network names from get_networks are expected to be the keys of this
170
+        dict. In case some networks are missed, they are expected to be either
171
+        DHCP or internally generated in the next steps by the design rules.
172
+        """
173
+        return {}
174
+
175
+    @abc.abstractmethod
176
+    def get_dns_servers(self, region):
177
+        """Return the DNS servers
178
+
179
+        :param string region: Region name
180
+
181
+        :returns: List of DNS servers to be configured on host
182
+
183
+        :rtype: List
184
+
185
+        Example: ['8.8.8.8', '8.8.8.4']
186
+        """
187
+        return []
188
+
189
+    @abc.abstractmethod
190
+    def get_ntp_servers(self, region):
191
+        """Return the NTP servers
192
+
193
+        :param string region: Region name
194
+
195
+        :returns: List of NTP servers to be configured on host
196
+
197
+        :rtype: List
198
+
199
+        Example: ['ntp1.ubuntu1.example', 'ntp2.ubuntu.example']
200
+        """
201
+        return []
202
+
203
+    @abc.abstractmethod
204
+    def get_ldap_information(self, region):
205
+        """Return the LDAP server information
206
+
207
+        :param string region: Region name
208
+
209
+        :returns: LDAP server information
210
+
211
+        :rtype: Dict
212
+
213
+        Example: {'url': 'ldap.example.com',
214
+                  'common_name': 'ldap-site1',
215
+                  'domain': 'test',
216
+                  'subdomain': 'test_sub1'}
217
+        """
218
+        return {}
219
+
220
+    @abc.abstractmethod
221
+    def get_location_information(self, region):
222
+        """Return location information
223
+
224
+        :param string region: Region name
225
+
226
+        :returns: Dict of location information
227
+
228
+        :rtype: dict
229
+
230
+        Example: {'name': 'Dallas',
231
+                  'physical_location': 'DAL01',
232
+                  'state': 'Texas',
233
+                  'country': 'US',
234
+                  'corridor': 'CR1'}
235
+        """
236
+        return {}
237
+
238
+    @abc.abstractmethod
239
+    def get_domain_name(self, region):
240
+        """Return the Domain name
241
+
242
+        :param string region: Region name
243
+
244
+        :returns: Domain name
245
+
246
+        :rtype: string
247
+
248
+        Example: example.com
249
+        """
250
+        return ""
251
+
252
+    def extract_baremetal_information(self):
253
+        """Get baremetal information from plugin
254
+
255
+        :returns: dict of baremetal nodes
256
+
257
+        :rtype: dict
258
+
259
+        Return dict should be in the format
260
+        {
261
+          'EXAMR06': {                 # rack name
262
+            'examr06c036': {           # host name
263
+              'host_profile': None,
264
+              'ip': {
265
+                'overlay': {},
266
+                'oob': {},
267
+                'calico': {},
268
+                'oam': {},
269
+                'storage': {},
270
+                'pxe': {}
271
+              },
272
+              'rack': 'EXAMR06',
273
+              'type': 'compute'
274
+            }
275
+          }
276
+        }
277
+        """
278
+        LOG.info("Extract baremetal information from plugin")
279
+        baremetal = {}
280
+        is_genesis = False
281
+        hosts = self.get_hosts(self.region)
282
+
283
+        # For each host list fill host profile and network IPs
284
+        for host in hosts:
285
+            host_name = host['name']
286
+            rack_name = host['rack_name']
287
+
288
+            if rack_name not in baremetal:
289
+                baremetal[rack_name] = {}
290
+
291
+            # Prepare temp dict for each host and append it to baremetal
292
+            # at a rack level
293
+            temp_host = {}
294
+            if host['host_profile'] is None:
295
+                temp_host['host_profile'] = "#CHANGE_ME"
296
+            else:
297
+                temp_host['host_profile'] = host['host_profile']
298
+
299
+            # Get Host IPs from plugin
300
+            temp_host_ips = self.get_ips(self.region, host_name)
301
+
302
+            # Fill network IP for this host
303
+            temp_host['ip'] = {}
304
+            temp_host['ip']['oob'] = temp_host_ips[host_name].get('oob', "")
305
+            temp_host['ip']['calico'] = temp_host_ips[host_name].get(
306
+                'calico', "")
307
+            temp_host['ip']['oam'] = temp_host_ips[host_name].get('oam', "")
308
+            temp_host['ip']['storage'] = temp_host_ips[host_name].get(
309
+                'storage', "")
310
+            temp_host['ip']['overlay'] = temp_host_ips[host_name].get(
311
+                'overlay', "")
312
+            temp_host['ip']['pxe'] = temp_host_ips[host_name].get(
313
+                'pxe', "#CHANGE_ME")
314
+
315
+            # Fill in the host type (compute/controller/genesis)
316
+            # "cp" host profile is controller
317
+            # "ns" host profile is compute
318
+            if (temp_host['host_profile'] == 'cp'):
319
+                # The first controller node is designated as genesis
320
+                if is_genesis is False:
321
+                    is_genesis = True
322
+                    temp_host['type'] = 'genesis'
323
+                else:
324
+                    temp_host['type'] = 'controller'
325
+            else:
326
+                temp_host['type'] = 'compute'
327
+
328
+            baremetal[rack_name][host_name] = temp_host
329
+        LOG.debug("Baremetal information:\n{}".format(
330
+            pprint.pformat(baremetal)))
331
+
332
+        return baremetal
333
+
334
+    def extract_site_information(self):
335
+        """Get site information from plugin
336
+
337
+        :returns: dict of site information
338
+
339
+        :rtype: dict
340
+
341
+        Return dict should be in the format
342
+        {
343
+          'name': '',
344
+          'country': '',
345
+          'state': '',
346
+          'corridor': '',
347
+          'sitetype': '',
348
+          'dns': [],
349
+          'ntp': [],
350
+          'ldap': {},
351
+          'domain': None
352
+        }
353
+        """
354
+        LOG.info("Extract site information from plugin")
355
+        site_info = {}
356
+
357
+        # Extract location information
358
+        location_data = self.get_location_information(self.region)
359
+        if location_data is not None:
360
+            site_info = location_data
361
+
362
+        dns_data = self.get_dns_servers(self.region)
363
+        site_info['dns'] = dns_data
364
+
365
+        ntp_data = self.get_ntp_servers(self.region)
366
+        site_info['ntp'] = ntp_data
367
+
368
+        ldap_data = self.get_ldap_information(self.region)
369
+        site_info['ldap'] = ldap_data
370
+
371
+        domain_data = self.get_domain_name(self.region)
372
+        site_info['domain'] = domain_data
373
+
374
+        LOG.debug("Extracted site information:\n{}".format(
375
+            pprint.pformat(site_info)))
376
+
377
+        return site_info
378
+
379
+    def extract_network_information(self):
380
+        """Get network information from plugin
381
+        like Subnets, DNS, NTP, LDAP details.
382
+
383
+        :returns: dict of baremetal nodes
384
+
385
+        :rtype: dict
386
+
387
+        Return dict should be in the format
388
+        {
389
+          'vlan_network_data': {
390
+            'oam': {},
391
+            'ingress': {},
392
+            'oob': {}
393
+            'calico': {},
394
+            'storage': {},
395
+            'pxe': {},
396
+            'overlay': {}
397
+          }
398
+        }
399
+        """
400
+        LOG.info("Extract network information from plugin")
401
+        network_data = {}
402
+        networks = self.get_networks(self.region)
403
+
404
+        # We are interested in only the below networks mentioned in
405
+        # networks_to_scan, so look for these networks from the data
406
+        # returned by plugin
407
+        networks_to_scan = [
408
+            'calico', 'overlay', 'pxe', 'storage', 'oam', 'oob', 'ingress'
409
+        ]
410
+        network_data['vlan_network_data'] = {}
411
+
412
+        for net in networks:
413
+            tmp_net = {}
414
+            if net['name'] in networks_to_scan:
415
+                tmp_net['subnet'] = net['subnet']
416
+                tmp_net['vlan'] = net['vlan']
417
+
418
+            network_data['vlan_network_data'][net['name']] = tmp_net
419
+
420
+        LOG.debug("Extracted network data:\n{}".format(
421
+            pprint.pformat(network_data)))
422
+        return network_data
423
+
424
+    def extract_data(self):
425
+        """Extract data from plugin
426
+
427
+        Gather data related to baremetal, networks, storage and other site
428
+        related information from plugin
429
+        """
430
+        LOG.info("Extract data from plugin")
431
+        site_data = {}
432
+        site_data['baremetal'] = self.extract_baremetal_information()
433
+        site_data['site_info'] = self.extract_site_information()
434
+        site_data['network'] = self.extract_network_information()
435
+        self.site_data = site_data
436
+        return site_data
437
+
438
+    def apply_additional_data(self, extra_data):
439
+        """Apply any additional inputs from user
440
+
441
+        In case the plugin does not provide some data, the user can specify
442
+        it as additional data in the form of a dict. The user-
443
+        provided dict will be merged recursively into site_data.
444
+        If data is repeated, the additional data supplied
445
+        shall take precedence.
446
+        """
447
+        LOG.info("Update site data with additional input")
448
+        tmp_site_data = utils.dict_merge(self.site_data, extra_data)
449
+        self.site_data = tmp_site_data
450
+        return self.site_data
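
extract_data() above relies only on the abstract getters, so a concrete plugin just has to return data in the shapes documented in the docstrings. A minimal sketch, not part of this commit, of a toy plugin with hard-coded values (every host, network and address below is invented):

    from spyglass.data_extractor.base import BaseDataSourcePlugin

    class StaticPlugin(BaseDataSourcePlugin):
        # Toy plugin returning canned data in the documented shapes.
        def set_config_opts(self, conf):
            self.conf = conf

        def get_plugin_conf(self, kwargs):
            return kwargs

        def get_racks(self, region):
            return ['rack01']

        def get_hosts(self, region, rack=None):
            return [{'name': 'host01', 'type': 'controller',
                     'rack_name': 'rack01', 'host_profile': 'cp'}]

        def get_networks(self, region):
            return [{'name': 'pxe', 'vlan': '42',
                     'subnet': '172.30.1.0/24', 'gateway': '172.30.1.1'}]

        def get_ips(self, region, host=None):
            return {'host01': {'pxe': '172.30.1.10'}}

        def get_dns_servers(self, region):
            return ['8.8.8.8']

        def get_ntp_servers(self, region):
            return ['ntp.example.com']

        def get_ldap_information(self, region):
            return {}

        def get_location_information(self, region):
            return {'name': 'Dallas', 'state': 'Texas', 'country': 'US'}

        def get_domain_name(self, region):
            return 'example.com'

    # 'cp' host profile => the first such host becomes the genesis node
    site_data = StaticPlugin('region1').extract_data()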

spyglass/data_extractor/custom_exceptions.py  +46 -0

@@ -0,0 +1,46 @@
1
+# Copyright 2018 AT&T Intellectual Property.  All other rights reserved.
2
+#
3
+# Licensed under the Apache License, Version 2.0 (the 'License');
4
+# you may not use this file except in compliance with the License.
5
+# You may obtain a copy of the License at
6
+#
7
+# http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an 'AS IS' BASIS,
11
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+# See the License for the specific language governing permissions and
13
+# limitations under the License.
14
+import logging
15
+import sys
16
+
17
+LOG = logging.getLogger(__name__)
18
+
19
+
20
+class BaseError(Exception):
21
+    def __init__(self, msg):
22
+        self.msg = msg
23
+
24
+    def display_error(self):
25
+        LOG.error(self.msg)
26
+        sys.exit(1)
27
+
28
+
29
+class MissingAttributeError(BaseError):
30
+    pass
31
+
32
+
33
+class MissingValueError(BaseError):
34
+    pass
35
+
36
+
37
+class ApiClientError(BaseError):
38
+    pass
39
+
40
+
41
+class TokenGenerationError(BaseError):
42
+    pass
43
+
44
+
45
+class ConnectionError(BaseError):
46
+    pass
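
All of these exceptions funnel through BaseError, so callers can either catch a specific class or catch BaseError and let display_error() log the message and stop. A short usage sketch, not part of this commit (the lookup function and its input are invented):

    from spyglass.data_extractor.custom_exceptions import (
        BaseError, MissingAttributeError)

    def lookup_city(site_info):
        try:
            return site_info['city']
        except KeyError as e:
            raise MissingAttributeError('Missing {} in site data'.format(e))

    try:
        lookup_city({})
    except BaseError as err:
        err.display_error()   # logs the message, then exits with status 1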

spyglass/data_extractor/plugins/__init__.py  +0 -0


spyglass/data_extractor/plugins/formation.py  +496 -0

@@ -0,0 +1,496 @@
1
+# Copyright 2018 AT&T Intellectual Property.  All other rights reserved.
2
+#
3
+# Licensed under the Apache License, Version 2.0 (the 'License');
4
+# you may not use this file except in compliance with the License.
5
+# You may obtain a copy of the License at
6
+#
7
+# http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an 'AS IS' BASIS,
11
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+# See the License for the specific language governing permissions and
13
+# limitations under the License.
14
+
15
+import logging
16
+import pprint
17
+import re
18
+import requests
19
+import formation_client
20
+import urllib3
21
+
22
+from spyglass.data_extractor.base import BaseDataSourcePlugin
23
+
24
+from spyglass.data_extractor.custom_exceptions import (
25
+    ApiClientError, ConnectionError, MissingAttributeError,
26
+    TokenGenerationError)
27
+
28
+urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
29
+
30
+LOG = logging.getLogger(__name__)
31
+
32
+
33
+class FormationPlugin(BaseDataSourcePlugin):
34
+    def __init__(self, region):
35
+        # Save the site name if it is valid
36
+        try:
37
+            assert region is not None
38
+            super().__init__(region)
39
+        except AssertionError:
40
+            LOG.error("Site: None! Spyglass exited!")
41
+            LOG.info("Check spyglass --help for details")
42
+            exit()
43
+
44
+        self.source_type = 'rest'
45
+        self.source_name = 'formation'
46
+
47
+        # Configuration parameters
48
+        self.formation_api_url = None
49
+        self.user = None
50
+        self.password = None
51
+        self.token = None
52
+
53
+        # Formation objects
54
+        self.client_config = None
55
+        self.formation_api_client = None
56
+
57
+        # Site related data
58
+        self.region_zone_map = {}
59
+        self.site_name_id_mapping = {}
60
+        self.zone_name_id_mapping = {}
61
+        self.region_name_id_mapping = {}
62
+        self.rack_name_id_mapping = {}
63
+        self.device_name_id_mapping = {}
64
+        LOG.info("Initiated data extractor plugin:{}".format(self.source_name))
65
+
66
+    def set_config_opts(self, conf):
67
+        """ Sets the config params passed by CLI"""
68
+        LOG.info("Plugin params passed:\n{}".format(pprint.pformat(conf)))
69
+        self._validate_config_options(conf)
70
+        self.formation_api_url = conf['url']
71
+        self.user = conf['user']
72
+        self.password = conf['password']
73
+        self.token = conf.get('token', None)
74
+
75
+        self._get_formation_client()
76
+        self._update_site_and_zone(self.region)
77
+
78
+    def get_plugin_conf(self, kwargs):
79
+        """Validate the plugin params and return them on success"""
80
+        try:
81
+            assert (kwargs['formation_url']
82
+                    ) is not None, "formation_url is Not Specified"
83
+            url = kwargs['formation_url']
84
+            assert (kwargs['formation_user']
85
+                    ) is not None, "formation_user is Not Specified"
86
+            user = kwargs['formation_user']
87
+            assert (kwargs['formation_password']
88
+                    ) is not None, "formation_password is Not Specified"
89
+            password = kwargs['formation_password']
90
+        except AssertionError:
91
+            LOG.error("Insufficient plugin parameter! Spyglass exited!")
92
+            raise
94
+
95
+        plugin_conf = {'url': url, 'user': user, 'password': password}
96
+        return plugin_conf
97
+
98
+    def _validate_config_options(self, conf):
99
+        """Validate the CLI params passed
100
+
101
+        The method checks for missing parameters and terminates
102
+        Spyglass execution if found so.
103
+        """
104
+
105
+        missing_params = []
106
+        for key in conf.keys():
107
+            if conf[key] is None:
108
+                missing_params.append(key)
109
+        if len(missing_params) != 0:
110
+            LOG.error("Missing plugin params: {}".format(missing_params))
111
+            exit()
112
+
113
+    # Implement helper classes
114
+
115
+    def _generate_token(self):
116
+        """Generate token for Formation
117
+        The Formation API does not provide a separate resource to generate
118
+        a token. This is a workaround that calls the Formation API directly
119
+        to get the token instead of using the Formation client.
120
+        """
121
+        # Create formation client config object
122
+        self.client_config = formation_client.Configuration()
123
+        self.client_config.host = self.formation_api_url
124
+        self.client_config.username = self.user
125
+        self.client_config.password = self.password
126
+        self.client_config.verify_ssl = False
127
+
128
+        # Assumes the token never expires during the execution of this tool
129
+        if self.token:
130
+            return self.token
131
+
132
+        url = self.formation_api_url + '/zones'
133
+        try:
134
+            token_response = requests.get(
135
+                url,
136
+                auth=(self.user, self.password),
137
+                verify=self.client_config.verify_ssl)
138
+        except requests.exceptions.ConnectionError:
139
+            raise ConnectionError('Incorrect URL: {}'.format(url))
140
+
141
+        if token_response.status_code == 200:
142
+            self.token = token_response.json().get('X-Subject-Token', None)
143
+        else:
144
+            raise TokenGenerationError(
145
+                'Unable to generate token because {}'.format(
146
+                    token_response.reason))
147
+
148
+        return self.token
149
+
150
+    def _get_formation_client(self):
151
+        """Create formation client object
152
+
153
+        Formation uses X-Auth-Token for authentication and should be in
154
+        format "user|token".
155
+        Generate the token and add it to the formation config object.
156
+        """
157
+        token = self._generate_token()
158
+        self.client_config.api_key = {'X-Auth-Token': self.user + '|' + token}
159
+        self.formation_api_client = formation_client.ApiClient(
160
+            self.client_config)
161
+
162
+    def _update_site_and_zone(self, region):
163
+        """Get Zone name and Site name from region"""
164
+
165
+        zone = self._get_zone_by_region_name(region)
166
+        site = self._get_site_by_zone_name(zone)
167
+
168
+        # zone = region[:-1]
169
+        # site = zone[:-1]
170
+
171
+        self.region_zone_map[region] = {}
172
+        self.region_zone_map[region]['zone'] = zone
173
+        self.region_zone_map[region]['site'] = site
174
+
175
+    def _get_zone_by_region_name(self, region_name):
176
+        zone_api = formation_client.ZonesApi(self.formation_api_client)
177
+        zones = zone_api.zones_get()
178
+
179
+        # Walk through each zone and get regions
180
+        # Return when region name matches
181
+        for zone in zones:
182
+            self.zone_name_id_mapping[zone.name] = zone.id
183
+            zone_regions = self.get_regions(zone.name)
184
+            if region_name in zone_regions:
185
+                return zone.name
186
+
187
+        return None
188
+
189
+    def _get_site_by_zone_name(self, zone_name):
190
+        site_api = formation_client.SitesApi(self.formation_api_client)
191
+        sites = site_api.sites_get()
192
+
193
+        # Walk through each site and get zones
194
+        # Return when site name matches
195
+        for site in sites:
196
+            self.site_name_id_mapping[site.name] = site.id
197
+            site_zones = self.get_zones(site.name)
198
+            if zone_name in site_zones:
199
+                return site.name
200
+
201
+        return None
202
+
203
+    def _get_site_id_by_name(self, site_name):
204
+        if site_name in self.site_name_id_mapping:
205
+            return self.site_name_id_mapping.get(site_name)
206
+
207
+        site_api = formation_client.SitesApi(self.formation_api_client)
208
+        sites = site_api.sites_get()
209
+        for site in sites:
210
+            self.site_name_id_mapping[site.name] = site.id
211
+            if site.name == site_name:
212
+                return site.id
213
+
214
+    def _get_zone_id_by_name(self, zone_name):
215
+        if zone_name in self.zone_name_id_mapping:
216
+            return self.zone_name_id_mapping.get(zone_name)
217
+
218
+        zone_api = formation_client.ZonesApi(self.formation_api_client)
219
+        zones = zone_api.zones_get()
220
+        for zone in zones:
221
+            if zone.name == zone_name:
222
+                self.zone_name_id_mapping[zone.name] = zone.id
223
+                return zone.id
224
+
225
+    def _get_region_id_by_name(self, region_name):
226
+        if region_name in self.region_name_id_mapping:
227
+            return self.region_name_id_mapping.get(region_name)
228
+
229
+        for zone in self.zone_name_id_mapping:
230
+            self.get_regions(zone)
231
+
232
+        return self.region_name_id_mapping.get(region_name, None)
233
+
234
+    def _get_rack_id_by_name(self, rack_name):
235
+        if rack_name in self.rack_name_id_mapping:
236
+            return self.rack_name_id_mapping.get(rack_name)
237
+
238
+        for zone in self.zone_name_id_mapping:
239
+            self.get_racks(zone)
240
+
241
+        return self.rack_name_id_mapping.get(rack_name, None)
242
+
243
+    def _get_device_id_by_name(self, device_name):
244
+        if device_name in self.device_name_id_mapping:
245
+            return self.device_name_id_mapping.get(device_name)
246
+
247
+        self.get_hosts(self.region)
248
+
249
+        return self.device_name_id_mapping.get(device_name, None)
250
+
251
+    def _get_racks(self, zone, rack_type='compute'):
252
+        zone_id = self._get_zone_id_by_name(zone)
253
+        rack_api = formation_client.RacksApi(self.formation_api_client)
254
+        racks = rack_api.zones_zone_id_racks_get(zone_id)
255
+
256
+        racks_list = []
257
+        for rack in racks:
258
+            rack_name = rack.name
259
+            self.rack_name_id_mapping[rack_name] = rack.id
260
+            if rack.rack_type.name == rack_type:
261
+                racks_list.append(rack_name)
262
+
263
+        return racks_list
264
+
265
+    # Functions that will be used internally within this plugin
266
+
267
+    def get_zones(self, site=None):
268
+        zone_api = formation_client.ZonesApi(self.formation_api_client)
269
+
270
+        if site is None:
271
+            zones = zone_api.zones_get()
272
+        else:
273
+            site_id = self._get_site_id_by_name(site)
274
+            zones = zone_api.sites_site_id_zones_get(site_id)
275
+
276
+        zones_list = []
277
+        for zone in zones:
278
+            zone_name = zone.name
279
+            self.zone_name_id_mapping[zone_name] = zone.id
280
+            zones_list.append(zone_name)
281
+
282
+        return zones_list
283
+
284
+    def get_regions(self, zone):
285
+        zone_id = self._get_zone_id_by_name(zone)
286
+        region_api = formation_client.RegionApi(self.formation_api_client)
287
+        regions = region_api.zones_zone_id_regions_get(zone_id)
288
+        regions_list = []
289
+        for region in regions:
290
+            region_name = region.name
291
+            self.region_name_id_mapping[region_name] = region.id
292
+            regions_list.append(region_name)
293
+
294
+        return regions_list
295
+
296
+    # Implement Abstract functions
297
+
298
+    def get_racks(self, region):
299
+        zone = self.region_zone_map[region]['zone']
300
+        return self._get_racks(zone, rack_type='compute')
301
+
302
+    def get_hosts(self, region, rack=None):
303
+        zone = self.region_zone_map[region]['zone']
304
+        zone_id = self._get_zone_id_by_name(zone)
305
+        device_api = formation_client.DevicesApi(self.formation_api_client)
306
+        control_hosts = device_api.zones_zone_id_control_nodes_get(zone_id)
307
+        compute_hosts = device_api.zones_zone_id_devices_get(
308
+            zone_id, type='KVM')
309
+
310
+        hosts_list = []
311
+        for host in control_hosts:
312
+            self.device_name_id_mapping[host.aic_standard_name] = host.id
313
+            hosts_list.append({
314
+                'name': host.aic_standard_name,
315
+                'type': 'controller',
316
+                'rack_name': host.rack_name,
317
+                'host_profile': host.host_profile_name
318
+            })
319
+
320
+        for host in compute_hosts:
321
+            self.device_name_id_mapping[host.aic_standard_name] = host.id
322
+            hosts_list.append({
323
+                'name': host.aic_standard_name,
324
+                'type': 'compute',
325
+                'rack_name': host.rack_name,
326
+                'host_profile': host.host_profile_name
327
+            })
328
+        """
329
+        for host in itertools.chain(control_hosts, compute_hosts):
330
+            self.device_name_id_mapping[host.aic_standard_name] = host.id
331
+            hosts_list.append({
332
+            'name': host.aic_standard_name,
333
+            'type': host.categories[0],
334
+            'rack_name': host.rack_name,
335
+            'host_profile': host.host_profile_name
336
+            })
337
+        """
338
+
339
+        return hosts_list
340
+
341
+    def get_networks(self, region):
342
+        zone = self.region_zone_map[region]['zone']
343
+        zone_id = self._get_zone_id_by_name(zone)
344
+        region_id = self._get_region_id_by_name(region)
345
+        vlan_api = formation_client.VlansApi(self.formation_api_client)
346
+        vlans = vlan_api.zones_zone_id_regions_region_id_vlans_get(
347
+            zone_id, region_id)
348
+
349
+        # Case when vlans list is empty from
350
+        # zones_zone_id_regions_region_id_vlans_get
351
+        if len(vlans) == 0:
352
+            # get device-id from the first host and get the network details
353
+            hosts = self.get_hosts(self.region)
354
+            host = hosts[0]['name']
355
+            device_id = self._get_device_id_by_name(host)
356
+            vlans = vlan_api.zones_zone_id_devices_device_id_vlans_get(
357
+                zone_id, device_id)
358
+
359
+        LOG.debug("Extracted region network information\n{}".format(vlans))
360
+        vlans_list = []
361
+        for vlan_ in vlans:
362
+            if len(vlan_.vlan.ipv4) != 0:
363
+                tmp_vlan = {}
364
+                tmp_vlan['name'] = self._get_network_name_from_vlan_name(
365
+                    vlan_.vlan.name)
366
+                tmp_vlan['vlan'] = vlan_.vlan.vlan_id
367
+                tmp_vlan['subnet'] = vlan_.vlan.subnet_range
368
+                tmp_vlan['gateway'] = vlan_.ipv4_gateway
369
+                tmp_vlan['subnet_level'] = vlan_.vlan.subnet_level
370
+                vlans_list.append(tmp_vlan)
371
+
372
+        return vlans_list
373
+
374
+    def get_ips(self, region, host=None):
375
+        zone = self.region_zone_map[region]['zone']
376
+        zone_id = self._get_zone_id_by_name(zone)
377
+
378
+        if host:
379
+            hosts = [host]
380
+        else:
381
+            hosts = []
382
+            hosts_dict = self.get_hosts(zone)
383
+            for host in hosts_dict:
384
+                hosts.append(host['name'])
385
+
386
+        vlan_api = formation_client.VlansApi(self.formation_api_client)
387
+        ip_ = {}
388
+
389
+        for host in hosts:
390
+            device_id = self._get_device_id_by_name(host)
391
+            vlans = vlan_api.zones_zone_id_devices_device_id_vlans_get(
392
+                zone_id, device_id)
393
+            LOG.debug("Received VLAN Network Information\n{}".format(vlans))
394
+            ip_[host] = {}
395
+            for vlan_ in vlans:
396
+                # TODO(pg710r) We need to handle the case when incoming ipv4
397
+                # list is empty
398
+                if len(vlan_.vlan.ipv4) != 0:
399
+                    name = self._get_network_name_from_vlan_name(
400
+                        vlan_.vlan.name)
401
+                    ipv4 = vlan_.vlan.ipv4[0].ip
402
+                    LOG.debug("vlan:{},name:{},ip:{},vlan_name:{}".format(
403
+                        vlan_.vlan.vlan_id, name, ipv4, vlan_.vlan.name))
404
+                    # TODO(pg710r) This code needs extending to support ipv4
405
+                    # and ipv6
406
+                    # ip_[host][name] = {'ipv4': ipv4}
407
+                    ip_[host][name] = ipv4
408
+
409
+        return ip_
410
+
411
+    def _get_network_name_from_vlan_name(self, vlan_name):
412
+        """ network names are ksn, oam, oob, overlay, storage, pxe
413
+
414
+        The following mapping rules apply:
415
+            vlan_name contains "ksn"  the network name is "calico"
416
+            vlan_name contains "storage" the network name is "storage"
417
+            vlan_name contains "server"  the network name is "oam"
418
+            vlan_name contains "ovs"  the network name is "overlay"
419
+            vlan_name contains "ILO" the network name is "oob"
420
+        """
421
+        network_names = {
422
+            'ksn': 'calico',
423
+            'storage': 'storage',
424
+            'server': 'oam',
425
+            'ovs': 'overlay',
426
+            'ILO': 'oob',
427
+            'pxe': 'pxe'
428
+        }
429
+
430
+        for name in network_names:
431
+            # Make a pattern that would ignore case.
432
+            # if name is 'ksn' pattern name is '(?i)(ksn)'
433
+            name_pattern = "(?i)({})".format(name)
434
+            if re.search(name_pattern, vlan_name):
435
+                return network_names[name]
436
+
437
+        return ""
438
+
439
+    def get_dns_servers(self, region):
440
+        try:
441
+            zone = self.region_zone_map[region]['zone']
442
+            zone_id = self._get_zone_id_by_name(zone)
443
+            zone_api = formation_client.ZonesApi(self.formation_api_client)
444
+            zone_ = zone_api.zones_zone_id_get(zone_id)
445
+        except formation_client.rest.ApiException as e:
446
+            raise ApiClientError(e.msg)
447
+
448
+        if not zone_.ipv4_dns:
449
+            LOG.warning("No DNS server")
450
+            return []
451
+
452
+        dns_list = []
453
+        for dns in zone_.ipv4_dns:
454
+            dns_list.append(dns.ip)
455
+
456
+        return dns_list
457
+
458
+    def get_ntp_servers(self, region):
459
+        return []
460
+
461
+    def get_ldap_information(self, region):
462
+        return {}
463
+
464
+    def get_location_information(self, region):
465
+        """ get location information for a zone and return """
466
+        site = self.region_zone_map[region]['site']
467
+        site_id = self._get_site_id_by_name(site)
468
+        site_api = formation_client.SitesApi(self.formation_api_client)
469
+        site_info = site_api.sites_site_id_get(site_id)
470
+
471
+        try:
472
+            return {
473
+                # 'corridor': site_info.corridor,
474
+                'name': site_info.city,
475
+                'state': site_info.state,
476
+                'country': site_info.country,
477
+                'physical_location_id': site_info.clli,
478
+            }
479
+        except AttributeError as e:
480
+            raise MissingAttributeError('Missing {} information in {}'.format(
481
+                e, site_info.city))
482
+
483
+    def get_domain_name(self, region):
484
+        try:
485
+            zone = self.region_zone_map[region]['zone']
486
+            zone_id = self._get_zone_id_by_name(zone)
487
+            zone_api = formation_client.ZonesApi(self.formation_api_client)
488
+            zone_ = zone_api.zones_zone_id_get(zone_id)
489
+        except formation_client.rest.ApiException as e:
490
+            raise ApiClientError(e.msg)
491
+
492
+        if not zone_.dns:
493
+            LOG.warning('No domain name set for zone')
494
+            return None
495
+
496
+        return zone_.dns
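As an aside, a minimal sketch of the VLAN-name-to-network-name mapping implemented by _get_network_name_from_vlan_name above; the sample VLAN names below are hypothetical and only illustrate the case-insensitive substring match:

    import re

    # Same mapping table as the plugin above; keys are matched as
    # case-insensitive substrings of the incoming VLAN name.
    NETWORK_NAMES = {
        'ksn': 'calico',
        'storage': 'storage',
        'server': 'oam',
        'ovs': 'overlay',
        'ILO': 'oob',
        'pxe': 'pxe',
    }

    def network_name_from_vlan_name(vlan_name):
        for key, network in NETWORK_NAMES.items():
            if re.search("(?i)({})".format(key), vlan_name):
                return network
        return ""

    # Hypothetical VLAN names, for illustration only.
    print(network_name_from_vlan_name("KSN_vlan_22"))     # -> calico
    print(network_name_from_vlan_name("ilo_mgmt_vlan"))   # -> oob
    print(network_name_from_vlan_name("unknown_vlan"))    # -> ""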

+ 0
- 0
spyglass/parser/__init__.py View File


+ 289
- 0
spyglass/parser/engine.py View File

@@ -0,0 +1,289 @@
1
+# Copyright 2018 AT&T Intellectual Property.  All other rights reserved.
2
+#
3
+# Licensed under the Apache License, Version 2.0 (the "License");
4
+# you may not use this file except in compliance with the License.
5
+# You may obtain a copy of the License at
6
+#
7
+# http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an "AS IS" BASIS,
11
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+# See the License for the specific language governing permissions and
13
+# limitations under the License.
14
+
15
+import copy
16
+import json
17
+import logging
18
+import pkg_resources
19
+import pprint
20
+import sys
21
+
22
+import jsonschema
23
+import netaddr
24
+import yaml
25
+
26
+LOG = logging.getLogger(__name__)
27
+
28
+
29
+class ProcessDataSource():
30
+    def __init__(self, sitetype):
31
+        # Initialize intermediary and save site type
32
+        self._initialize_intermediary()
33
+        self.region_name = sitetype
34
+
35
+    @staticmethod
36
+    def _read_file(file_name):
37
+        with open(file_name, 'r') as f:
38
+            raw_data = f.read()
39
+        return raw_data
40
+
41
+    def _initialize_intermediary(self):
42
+        self.host_type = {}
43
+        self.data = {
44
+            'network': {},
45
+            'baremetal': {},
46
+            'region_name': '',
47
+            'storage': {},
48
+            'site_info': {},
49
+        }
50
+        self.sitetype = None
51
+        self.genesis_node = None
52
+        self.region_name = None
53
+
54
+    def _get_network_subnets(self):
55
+        # Extract subnet information for networks
56
+        LOG.info("Extracting network subnets")
57
+        network_subnets = {}
58
+        for net_type in self.data['network']['vlan_network_data']:
59
+            # One of the types is ingress and we don't want that here
60
+            if (net_type != 'ingress'):
61
+                network_subnets[net_type] = netaddr.IPNetwork(
62
+                    self.data['network']['vlan_network_data'][net_type]
63
+                    ['subnet'])
64
+
65
+        LOG.debug("Network subnets:\n{}".format(
66
+            pprint.pformat(network_subnets)))
67
+        return network_subnets
68
+
69
+    def _get_genesis_node_details(self):
70
+        # Returns the genesis node details
71
+        LOG.info("Getting Genesis Node Details")
72
+        for racks in self.data['baremetal'].keys():
73
+            rack_hosts = self.data['baremetal'][racks]
74
+            for host in rack_hosts:
75
+                if rack_hosts[host]['type'] == 'genesis':
76
+                    self.genesis_node = rack_hosts[host]
77
+                    self.genesis_node['name'] = host
78
+
79
+        LOG.debug("Genesis Node Details:{}".format(
80
+            pprint.pformat(self.genesis_node)))
81
+
82
+    def _validate_extracted_data(self, data):
83
+        """ Validates the extracted data from input source.
84
+
85
+
86
+        It checks whether the data types and data format are as expected.
87
+        The method validates this against the regex pattern defined for each
88
+        data type.
89
+        """
90
+        LOG.info('Validating data read from extracted data')
91
+        temp_data = {}
92
+        temp_data = copy.deepcopy(data)
93
+
94
+        # Converting baremetal dict to list.
95
+        baremetal_list = []
96
+        for rack in temp_data['baremetal'].keys():
97
+            temp = [{k: v} for k, v in temp_data['baremetal'][rack].items()]
98
+            baremetal_list = baremetal_list + temp
99
+
100
+        temp_data['baremetal'] = baremetal_list
101
+        schema_dir = pkg_resources.resource_filename('spyglass', 'schemas/')
102
+        schema_file = schema_dir + "data_schema.json"
103
+        json_data = json.loads(json.dumps(temp_data))
104
+        with open(schema_file, 'r') as f:
105
+            json_schema = json.load(f)
106
+
107
+        try:
108
+            # Dump the massaged data to data2.json; useful for debugging
109
+            with open('data2.json', 'w') as outfile:
110
+                json.dump(temp_data, outfile, sort_keys=True, indent=4)
111
+            jsonschema.validate(json_data, json_schema)
112
+        except jsonschema.exceptions.ValidationError as e:
113
+            LOG.error("Validation Error")
114
+            LOG.error("Message:{}".format(e.message))
115
+            LOG.error("Validator_path:{}".format(e.path))
116
+            LOG.error("Validator_pattern:{}".format(e.validator_value))
117
+            LOG.error("Validator:{}".format(e.validator))
118
+            sys.exit()
119
+        except jsonschema.exceptions.SchemaError as e:
120
+            LOG.error("Schema Validation Error!!")
121
+            LOG.error("Message:{}".format(e.message))
122
+            LOG.error("Schema:{}".format(e.schema))
123
+            LOG.error("Validator_value:{}".format(e.validator_value))
124
+            LOG.error("Validator:{}".format(e.validator))
125
+            LOG.error("path:{}".format(e.path))
126
+            sys.exit()
127
+
128
+        LOG.info("Data validation Passed!")
129
+
130
+    def _apply_design_rules(self):
131
+        """ Applies design rules from rules.yaml
132
+
133
+
134
+        These rules are used to determine ip address allocation ranges,
135
+        host profile interfaces and also to create hardware profile
136
+        information. The method calls the corresponding rule handler function
137
+        based on rule name and applies them to appropriate data objects.
138
+        """
139
+        LOG.info("Apply design rules")
140
+        rules_dir = pkg_resources.resource_filename('spyglass', 'config/')
141
+        rules_file = rules_dir + 'rules.yaml'
142
+        rules_data_raw = self._read_file(rules_file)
143
+        rules_yaml = yaml.safe_load(rules_data_raw)
144
+        rules_data = {}
145
+        rules_data.update(rules_yaml)
146
+
147
+        for rule in rules_data.keys():
148
+            rule_name = rules_data[rule]['name']
149
+            function_str = "_apply_rule_" + rule_name
150
+            rule_data_name = rules_data[rule][rule_name]
151
+            function = getattr(self, function_str)
152
+            function(rule_data_name)
153
+            LOG.info("Applying rule:{}".format(rule_name))
154
+
155
+    def _apply_rule_host_profile_interfaces(self, rule_data):
156
+        pass
157
+
158
+    def _apply_rule_hardware_profile(self, rule_data):
159
+        pass
160
+
161
+    def _apply_rule_ip_alloc_offset(self, rule_data):
162
+        """ Offset allocation rules to determine ip address range(s)
163
+
164
+
165
+        This rule is applied to incoming network data to determine
166
+        network address, gateway ip and other address ranges
167
+        """
168
+        LOG.info("Apply network design rules")
169
+        vlan_network_data = {}
170
+
171
+        # Collect Rules
172
+        default_ip_offset = rule_data['default']
173
+        oob_ip_offset = rule_data['oob']
174
+        gateway_ip_offset = rule_data['gateway']
175
+        ingress_vip_offset = rule_data['ingress_vip']
176
+        # static_ip_end_offset for non pxe network
177
+        static_ip_end_offset = rule_data['static_ip_end']
178
+        # dhcp_ip_end_offset for pxe network
179
+        dhcp_ip_end_offset = rule_data['dhcp_ip_end']
180
+
181
+        # Set ingress vip and CIDR for bgp
182
+        LOG.info("Applying rule to network bgp data")
183
+        subnet = netaddr.IPNetwork(
184
+            self.data['network']['vlan_network_data']['ingress']['subnet'][0])
185
+        ips = list(subnet)
186
+        self.data['network']['bgp']['ingress_vip'] = str(
187
+            ips[ingress_vip_offset])
188
+        self.data['network']['bgp']['public_service_cidr'] = self.data[
189
+            'network']['vlan_network_data']['ingress']['subnet'][0]
190
+        LOG.debug("Updated network bgp data:\n{}".format(
191
+            pprint.pformat(self.data['network']['bgp'])))
192
+
193
+        LOG.info("Applying rule to vlan network data")
194
+        # Get network subnets
195
+        network_subnets = self._get_network_subnets()
196
+        # Apply rules to vlan networks
197
+        for net_type in network_subnets:
198
+            if net_type == 'oob':
199
+                ip_offset = oob_ip_offset
200
+            else:
201
+                ip_offset = default_ip_offset
202
+            vlan_network_data[net_type] = {}
203
+            subnet = network_subnets[net_type]
204
+            ips = list(subnet)
205
+
206
+            vlan_network_data[net_type]['network'] = str(
207
+                network_subnets[net_type])
208
+
209
+            vlan_network_data[net_type]['gateway'] = str(
210
+                ips[gateway_ip_offset])
211
+
212
+            vlan_network_data[net_type]['reserved_start'] = str(ips[1])
213
+            vlan_network_data[net_type]['reserved_end'] = str(ips[ip_offset])
214
+
215
+            static_start = str(ips[ip_offset + 1])
216
+            static_end = str(ips[static_ip_end_offset])
217
+
218
+            if net_type == 'pxe':
219
+                mid = len(ips) // 2
220
+                static_end = str(ips[mid - 1])
221
+                dhcp_start = str(ips[mid])
222
+                dhcp_end = str(ips[dhcp_ip_end_offset])
223
+
224
+                vlan_network_data[net_type]['dhcp_start'] = dhcp_start
225
+                vlan_network_data[net_type]['dhcp_end'] = dhcp_end
226
+
227
+            vlan_network_data[net_type]['static_start'] = static_start
228
+            vlan_network_data[net_type]['static_end'] = static_end
229
+
230
+            # There is no vlan for oob network
231
+            if (net_type != 'oob'):
232
+                vlan_network_data[net_type]['vlan'] = self.data['network'][
233
+                    'vlan_network_data'][net_type]['vlan']
234
+
235
+            # OAM has default routes. Only for cruiser. TBD
236
+            if (net_type == 'oam'):
237
+                routes = ["0.0.0.0/0"]
238
+            else:
239
+                routes = []
240
+            vlan_network_data[net_type]['routes'] = routes
241
+
242
+            # Update network data to self.data
243
+            self.data['network']['vlan_network_data'][
244
+                net_type] = vlan_network_data[net_type]
245
+
246
+        LOG.debug("Updated vlan network data:\n{}".format(
247
+            pprint.pformat(vlan_network_data)))
248
+
249
+    def load_extracted_data_from_data_source(self, extracted_data):
250
+        """
251
+        Function called from spyglass.py to pass extracted data
252
+        from input data source
253
+        """
254
+        LOG.info("Load extracted data from data source")
255
+        self._validate_extracted_data(extracted_data)
256
+        self.data = extracted_data
257
+        LOG.debug("Extracted data from plugin data source:\n{}".format(
258
+            pprint.pformat(extracted_data)))
259
+        extracted_file = "extracted_file.yaml"
260
+        yaml_file = yaml.dump(extracted_data, default_flow_style=False)
261
+        with open(extracted_file, 'w') as f:
262
+            f.write(yaml_file)
263
+
264
+        # Append region_name supplied from CLI to self.data
265
+        self.data['region_name'] = self.region_name
266
+
267
+    def dump_intermediary_file(self, intermediary_dir):
268
+        """ Dumping intermediary yaml """
269
+        LOG.info("Dumping intermediary yaml")
270
+        intermediary_file = "{}_intermediary.yaml".format(
271
+            self.data['region_name'])
272
+
273
+        # Check if an output dir (intermediary_dir) was specified
274
+        if intermediary_dir is not None:
275
+            outfile = "{}/{}".format(intermediary_dir, intermediary_file)
276
+        else:
277
+            outfile = intermediary_file
278
+        LOG.info("Intermediary file path:{}".format(outfile))
279
+        yaml_file = yaml.dump(self.data, default_flow_style=False)
280
+        with open(outfile, 'w') as f:
281
+            f.write(yaml_file)
282
+
283
+    def generate_intermediary_yaml(self):
284
+        """ Generating intermediary yaml """
285
+        LOG.info("Generating intermediary yaml")
286
+        self._apply_design_rules()
287
+        self._get_genesis_node_details()
288
+        self.intermediary_yaml = self.data
289
+        return self.intermediary_yaml
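To make the offset arithmetic in _apply_rule_ip_alloc_offset concrete, here is a small worked sketch using netaddr directly; the subnet and offset values are hypothetical stand-ins for the values that normally come from config/rules.yaml:

    import netaddr

    # Hypothetical rule values (the real ones come from config/rules.yaml).
    default_ip_offset = 10
    gateway_ip_offset = 1
    dhcp_ip_end_offset = -2

    subnet = netaddr.IPNetwork('172.30.1.0/24')      # hypothetical pxe subnet
    ips = list(subnet)

    gateway = str(ips[gateway_ip_offset])            # 172.30.1.1
    reserved_start = str(ips[1])                     # 172.30.1.1
    reserved_end = str(ips[default_ip_offset])       # 172.30.1.10
    static_start = str(ips[default_ip_offset + 1])   # 172.30.1.11

    # For the pxe network the remaining space is split evenly between the
    # static and dhcp ranges, exactly as in the rule handler above.
    mid = len(ips) // 2
    static_end = str(ips[mid - 1])                   # 172.30.1.127
    dhcp_start = str(ips[mid])                       # 172.30.1.128
    dhcp_end = str(ips[dhcp_ip_end_offset])          # 172.30.1.254

    print(gateway, reserved_start, reserved_end, static_start,
          static_end, dhcp_start, dhcp_end)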

+ 362
- 0
spyglass/schemas/data_schema.json View File

@@ -0,0 +1,362 @@
1
+{
2
+  "$schema": "http://json-schema.org/schema#",
3
+  "title": "All",
4
+  "description": "All information",
5
+  "type": "object",
6
+  "properties": {
7
+    "baremetal": {
8
+      "type": "array",
9
+      "items": {
10
+        "type": "object",
11
+        "$ref": "#/definitions/baremetal_list"
12
+      }
13
+    },
14
+    "network": {
15
+      "type": "object",
16
+      "properties": {
17
+        "bgp": {
18
+          "type": "object",
19
+          "$ref": "#/definitions/bgp"
20
+        },
21
+        "vlan_network_data": {
22
+          "type": "array",
23
+          "$ref": "#/definitions/vlan_network_data"
24
+        }
25
+      },
26
+      "required": [
27
+        "bgp",
28
+        "vlan_network_data"
29
+      ]
30
+    },
31
+    "site_info": {
32
+      "type": "object",
33
+      "$ref": "#/definitions/site_info"
34
+    },
35
+    "storage": {
36
+      "type": "object",
37
+      "$ref": "#/definitions/storage"
38
+    }
39
+  },
40
+  "required": [
41
+    "baremetal",
42
+    "network",
43
+    "site_info",
44
+    "storage"
45
+  ],
46
+  "definitions": {
47
+    "baremetal_list": {
48
+      "type": "object",
49
+      "patternProperties": {
50
+        ".*": {
51
+          "properties": {
52
+            "ip": {
53
+              "type": "object",
54
+              "properties": {
55
+                "calico": {
56
+                  "type": "string",
57
+                  "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$"
58
+                },
59
+                "oam": {
60
+                  "type": "string",
61
+                  "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$"
62
+                },
63
+                "oob": {
64
+                  "type": "string",
65
+                  "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$"
66
+                },
67
+                "overlay": {
68
+                  "type": "string",
69
+                  "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$"
70
+                },
71
+                "pxe": {
72
+                  "type": "string",
73
+                  "pattern": "^((([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(#CHANGE_ME)$"
74
+                },
75
+                "storage": {
76
+                  "type": "string",
77
+                  "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$"
78
+                }
79
+             },
80
+                "required" :[
81
+                  "calico",
82
+                  "oam",
83
+                  "oob",
84
+                  "overlay",
85
+                  "pxe",
86
+                  "storage"
87
+                  ]
88
+            },
89
+            "host_profile": {
90
+              "description": "Host profile of the host",
91
+              "type": "string",
92
+              "pattern": "^([a-zA-Z]+)|(#CHANGE_ME)$"
93
+            },
94
+            "type": {
95
+              "description": "Host profile type:Compute or Controller or genesis ",
96
+              "type": "string",
97
+              "pattern": "(?i)^(compute|controller|genesis)$"
98
+            }
99
+          },
100
+          "required" :[
101
+            "ip",
102
+            "host_profile",
103
+            "type"
104
+            ]
105
+        }
106
+      }
107
+    },
108
+    "bgp": {
109
+      "type": "object",
110
+      "properties": {
111
+        "asnumber": {
112
+          "type": "integer",
113
+          "pattern": "^[0-9]{1,10}$"
114
+        },
115
+        "peer_asnumber": {
116
+          "type": "integer",
117
+          "pattern": "^[0-9]{1,10}$"
118
+        },
119
+        "peers": {
120
+          "type": "array",
121
+          "items": [
122
+            {
123
+              "type": "string",
124
+              "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$"
125
+            }
126
+          ]
127
+        }
128
+      },
129
+      "required": [
130
+        "asnumber",
131
+        "peer_asnumber",
132
+        "peers"
133
+      ]
134
+    },
135
+    "vlan_network_data": {
136
+      "type": "object",
137
+      "properties": {
138
+        "calico": {
139
+          "type": "object",
140
+          "properties": {
141
+            "subnet": {
142
+              "description": "Subnet address of the network",
143
+              "type": "string",
144
+              "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])/([0-9]|[1-2][0-9]|3[0-2])$"
145
+            },
146
+            "vlan": {
147
+              "description": "Vlan id of the network",
148
+              "type": "string",
149
+              "pattern": "^([0-9]|[0-9][0-9]|[0-9][0-9][0-9]|[0-3][0-9][0-9][0-9]|40[0-9][0-5])$"
150
+            }
151
+          },
152
+          "required": [
153
+            "subnet",
154
+            "vlan"
155
+          ]
156
+        },
157
+          "ingress": {
158
+          "type": "object",
159
+          "properties": {
160
+            "subnet": {
161
+              "description": "Subnet address of the network",
162
+              "type": "array",
163
+	      "items": [
164
+		{
165
+		"type": "string",
166
+		"pattern":"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])/([0-9]|[1-2][0-9]|3[0-2])$"
167
+		}
168
+		]
169
+            },
170
+            "vlan": {
171
+              "description": "Vlan id of the network",
172
+              "type": "string",
173
+              "pattern": "^([0-9]|[0-9][0-9]|[0-9][0-9][0-9]|[0-3][0-9][0-9][0-9]|40[0-9][0-5])$"
174
+            }
175
+          },
176
+          "required": [
177
+            "subnet"
178
+          ]
179
+        },
180
+          "oam": {
181
+          "type": "object",
182
+          "properties": {
183
+            "subnet": {
184
+              "description": "Subnet address of the network",
185
+              "type": "string",
186
+              "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])/([0-9]|[1-2][0-9]|3[0-2])$"
187
+            },
188
+            "vlan": {
189
+              "description": "Vlan id of the network",
190
+              "type": "string",
191
+              "pattern": "^([0-9]|[0-9][0-9]|[0-9][0-9][0-9]|[0-3][0-9][0-9][0-9]|40[0-9][0-5])$"
192
+            }
193
+          },
194
+          "required": [
195
+            "subnet",
196
+	    "vlan"
197
+          ]
198
+        },
199
+          "oob": {
200
+          "type": "object",
201
+          "properties": {
202
+            "subnet": {
203
+              "description": "Subnet address of the network",
204
+              "type": "string",
205
+              "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])/([0-9]|[1-2][0-9]|3[0-2])$"
206
+            },
207
+            "vlan": {
208
+              "description": "Vlan id of the network",
209
+              "type": "string",
210
+              "pattern": "^([0-9]|[0-9][0-9]|[0-9][0-9][0-9]|[0-3][0-9][0-9][0-9]|40[0-9][0-5])$"
211
+            }
212
+          },
213
+          "required": [
214
+            "subnet",
215
+	    "vlan"
216
+          ]
217
+        },
218
+          "pxe": {
219
+          "type": "object",
220
+          "properties": {
221
+            "subnet": {
222
+              "description": "Subnet address of the network",
223
+              "type": "string",
224
+              "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])/([0-9]|[1-2][0-9]|3[0-2])$"
225
+            },
226
+            "vlan": {
227
+              "description": "Vlan id of the network",
228
+              "type": "string",
229
+              "pattern": "^([0-9]|[0-9][0-9]|[0-9][0-9][0-9]|[0-3][0-9][0-9][0-9]|40[0-9][0-5])$"
230
+            }
231
+          },
232
+          "required": [
233
+            "subnet",
234
+	    "vlan"
235
+          ]
236
+        },
237
+          "storage": {
238
+          "type": "object",
239
+          "properties": {
240
+            "subnet": {
241
+              "description": "Subnet address of the network",
242
+              "type": "string",
243
+              "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])/([0-9]|[1-2][0-9]|3[0-2])$"
244
+            },
245
+            "vlan": {
246
+              "description": "Vlan id of the network",
247
+              "type": "string",
248
+              "pattern": "^([0-9]|[0-9][0-9]|[0-9][0-9][0-9]|[0-3][0-9][0-9][0-9]|40[0-9][0-5])$"
249
+            }
250
+          },
251
+          "required": [
252
+            "subnet",
253
+	    "vlan"
254
+          ]
255
+        }
256
+
257
+      },
258
+        "required" :[
259
+          "calico",
260
+          "ingress",
261
+          "oam",
262
+          "oob",
263
+          "overlay",
264
+          "pxe",
265
+          "storage"
266
+          ]
267
+    },
268
+    "site_info": {
269
+      "type": "object",
270
+      "properties": {
271
+        "dns": {
272
+          "type": "object",
273
+          "properties": {
274
+            "servers": {
275
+              "type": "string",
276
+              "pattern": "^((((([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]),)+)|(((([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))+))+$"
277
+            }
278
+          }
279
+        },
280
+        "ntp": {
281
+          "type": "object",
282
+          "properties": {
283
+            "servers": {
284
+              "type": "string",
285
+              "pattern": "^((((([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]),)+)|(((([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))+))+$"
286
+            }
287
+          }
288
+        },
289
+        "ldap": {
290
+          "type": "object",
291
+          "properties": {
292
+            "common_name": {
293
+              "type": "string",
294
+              "pattern": "\\W+|\\w+"
295
+            },
296
+            "subdomain": {
297
+              "type": "string",
298
+              "pattern": "(?i)\\w+"
299
+            },
300
+            "url": {
301
+              "type": "string",
302
+              "pattern": "^\\w+://\\w+.*\\.[a-zA-Z]{2,3}$"
303
+            }
304
+          },
305
+          "required": [
306
+            "common_name",
307
+            "subdomain",
308
+            "url"
309
+          ]
310
+        },
311
+        "country": {
312
+          "type": "string",
313
+          "pattern": "(?i)\\w+"
314
+        },
315
+        "name": {
316
+          "type": "string",
317
+          "pattern": "(?i)\\w+"
318
+        },
319
+        "state": {
320
+          "type": "string",
321
+          "pattern": "(?i)\\w+"
322
+        },
323
+        "sitetype": {
324
+          "type": "string",
325
+          "pattern": "(?i)\\w+"
326
+        },
327
+        "physical_location_id": {
328
+          "type": "string",
329
+          "pattern": "^\\w+"
330
+        },
331
+        "domain": {
332
+          "type": "string",
333
+          "pattern": "^\\w+.*\\.[a-zA-Z]{2,3}$"
334
+        }
335
+      },
336
+      "required": [
337
+        "dns",
338
+        "ntp",
339
+        "ldap",
340
+        "country",
341
+        "name",
342
+        "state",
343
+        "sitetype",
344
+        "physical_location_id",
345
+        "domain"
346
+      ]
347
+    },
348
+    "storage": {
349
+      "type": "object",
350
+      "patternProperties": {
351
+        "ceph": {
352
+          "controller": {
353
+            "osd_count": {
354
+              "type": "integer",
355
+              "pattern": "^[0-9]{1,2}$"
356
+            }
357
+          }
358
+        }
359
+      }
360
+    }
361
+  }
362
+}
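A short sketch of how this schema is exercised (mirroring _validate_extracted_data in the parser); the document below is a deliberately incomplete, hypothetical fragment, so validation is expected to fail and report the missing required sections:

    import json

    import jsonschema
    import pkg_resources

    schema_file = pkg_resources.resource_filename(
        'spyglass', 'schemas/data_schema.json')
    with open(schema_file) as f:
        schema = json.load(f)

    # Hypothetical, deliberately incomplete document: 'network',
    # 'site_info' and 'storage' are missing.
    document = {'baremetal': []}

    try:
        jsonschema.validate(document, schema)
    except jsonschema.exceptions.ValidationError as e:
        print("validation failed:", e.message)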

+ 0
- 0
spyglass/site_processors/__init__.py View File


+ 44
- 0
spyglass/site_processors/base.py View File

@@ -0,0 +1,44 @@
1
+# Copyright 2018 AT&T Intellectual Property.  All other rights reserved.
2
+#
3
+# Licensed under the Apache License, Version 2.0 (the "License");
4
+# you may not use this file except in compliance with the License.
5
+# You may obtain a copy of the License at
6
+#
7
+# http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an "AS IS" BASIS,
11
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+# See the License for the specific language governing permissions and
13
+# limitations under the License.
14
+
15
+
16
+class BaseProcessor:
17
+    def __init__(self, file_name):
18
+        pass
19
+
20
+    def render_template(self, template):
21
+        pass
22
+
23
+    @staticmethod
24
+    def get_role_wise_nodes(yaml_data):
25
+        hosts = {
26
+            'genesis': {},
27
+            'masters': [],
28
+            'workers': [],
29
+        }
30
+
31
+        for rack in yaml_data['baremetal']:
32
+            for host in yaml_data['baremetal'][rack]:
33
+                if yaml_data['baremetal'][rack][host]['type'] == 'genesis':
34
+                    hosts['genesis'] = {
35
+                        'name': host,
36
+                        'pxe': yaml_data['baremetal'][rack][host]['ip']['pxe'],
37
+                        'oam': yaml_data['baremetal'][rack][host]['ip']['oam'],
38
+                    }
39
+                elif yaml_data['baremetal'][rack][host][
40
+                        'type'] == 'controller':
41
+                    hosts['masters'].append(host)
42
+                else:
43
+                    hosts['workers'].append(host)
44
+        return hosts
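A hedged usage sketch of get_role_wise_nodes with a tiny, hypothetical intermediary fragment (the rack and host names are made up); only the keys the helper actually reads are included:

    from spyglass.site_processors.base import BaseProcessor

    yaml_data = {
        'baremetal': {
            'rack72': {
                'node1': {'type': 'genesis',
                          'ip': {'pxe': '172.30.1.11', 'oam': '10.0.220.11'}},
                'node2': {'type': 'controller',
                          'ip': {'pxe': '172.30.1.12', 'oam': '10.0.220.12'}},
                'node3': {'type': 'compute',
                          'ip': {'pxe': '172.30.1.13', 'oam': '10.0.220.13'}},
            }
        }
    }

    hosts = BaseProcessor.get_role_wise_nodes(yaml_data)
    # hosts['genesis'] -> {'name': 'node1', 'pxe': '172.30.1.11', 'oam': '10.0.220.11'}
    # hosts['masters'] -> ['node2'], hosts['workers'] -> ['node3']
    print(hosts)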

+ 79
- 0
spyglass/site_processors/site_processor.py View File

@@ -0,0 +1,79 @@
1
+# Copyright 2018 AT&T Intellectual Property.  All other rights reserved.
2
+#
3
+# Licensed under the Apache License, Version 2.0 (the "License");
4
+# you may not use this file except in compliance with the License.
5
+# You may obtain a copy of the License at
6
+#
7
+# http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an "AS IS" BASIS,
11
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+# See the License for the specific language governing permissions and
13
+# limitations under the License.
14
+
15
+import logging
16
+import pkg_resources
17
+import os
18
+from jinja2 import Environment
19
+from jinja2 import FileSystemLoader
20
+from .base import BaseProcessor
21
+
22
+LOG = logging.getLogger(__name__)
23
+
24
+
25
+class SiteProcessor(BaseProcessor):
26
+    def __init__(self, intermediary_yaml, manifest_dir):
27
+        self.yaml_data = intermediary_yaml
28
+        self.manifest_dir = manifest_dir
29
+
30
+    def render_template(self):
31
+        """ The method renders network config yaml from j2 templates.
32
+
33
+
34
+        Network configs common to all racks (i.e. oam, overlay, storage,
35
+        calico) are generated in a single file. Rack specific
36
+        configs (pxe and oob) are generated per rack.
37
+        """
38
+        # Check if manifest_dir was specified
39
+        if self.manifest_dir is not None:
40
+            site_manifest_dir = self.manifest_dir + '/pegleg_manifests/site/'
41
+        else:
42
+            site_manifest_dir = 'pegleg_manifests/site/'
43
+        LOG.info("Site manifest output dir:{}".format(site_manifest_dir))
44
+
45
+        template_software_dir = pkg_resources.resource_filename(
46
+            'spyglass', 'templates/')
47
+        template_dir_abspath = os.path.dirname(template_software_dir)
48
+        LOG.debug("Template Path:%s", template_dir_abspath)
49
+
50
+        for dirpath, dirs, files in os.walk(template_dir_abspath):
51
+            for filename in files:
52
+                j2_env = Environment(
53
+                    autoescape=False,
54
+                    loader=FileSystemLoader(dirpath),
55
+                    trim_blocks=True)
56
+                j2_env.filters[
57
+                    'get_role_wise_nodes'] = self.get_role_wise_nodes
58
+                templatefile = os.path.join(dirpath, filename)
59
+                outdirs = dirpath.split('templates')[1]
60
+
61
+                outfile_path = '{}{}{}'.format(
62
+                    site_manifest_dir, self.yaml_data['region_name'], outdirs)
63
+                outfile_yaml = templatefile.split('.j2')[0].split('/')[-1]
64
+                outfile = outfile_path + '/' + outfile_yaml
65
+                outfile_dir = os.path.dirname(outfile)
66
+                if not os.path.exists(outfile_dir):
67
+                    os.makedirs(outfile_dir)
68
+                template_j2 = j2_env.get_template(filename)
69
+                try:
70
+                    out = open(outfile, "w")
71
+                    template_j2.stream(data=self.yaml_data).dump(out)
72
+                    LOG.info("Rendering {}".format(outfile_yaml))
73
+                    out.close()
74
+                except IOError as ioe:
75
+                    LOG.error(
76
+                        "IOError during rendering:{}".format(outfile_yaml))
77
+                    raise SystemExit(
78
+                        "Error when generating {:s}:\n{:s}".format(
79
+                            outfile, ioe.strerror))
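A minimal sketch of the Jinja2 pattern used in render_template above (registering get_role_wise_nodes as a filter and streaming the rendered output); the inline template and data are hypothetical stand-ins for the packaged .j2 templates:

    import sys

    from jinja2 import Environment

    from spyglass.site_processors.base import BaseProcessor

    env = Environment(autoescape=False, trim_blocks=True)
    env.filters['get_role_wise_nodes'] = BaseProcessor.get_role_wise_nodes

    # Hypothetical inline template; the real ones live under spyglass/templates/.
    template = env.from_string(
        "masters:\n"
        "{% for host in (data | get_role_wise_nodes)['masters'] %}"
        "  - {{ host }}\n"
        "{% endfor %}")

    data = {'baremetal': {'rack72': {
        'ctl01': {'type': 'controller',
                  'ip': {'pxe': '172.30.1.12', 'oam': '10.0.220.12'}}}}}

    template.stream(data=data).dump(sys.stdout)   # prints: masters: / - ctl01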

+ 172
- 0
spyglass/spyglass.py View File

@@ -0,0 +1,172 @@
1
+# Copyright 2018 AT&T Intellectual Property.  All other rights reserved.
2
+#
3
+# Licensed under the Apache License, Version 2.0 (the 'License');
4
+# you may not use this file except in compliance with the License.
5
+# You may obtain a copy of the License at
6
+#
7
+# http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an 'AS IS' BASIS,
11
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+# See the License for the specific language governing permissions and
13
+# limitations under the License.
14
+
15
+import logging
16
+import pkg_resources
17
+import pprint
18
+
19
+import click
20
+import yaml
21
+
22
+from spyglass.parser.engine import ProcessDataSource
23
+from spyglass.site_processors.site_processor import SiteProcessor
24
+
25
+LOG = logging.getLogger('spyglass')
26
+
27
+
28
+@click.command()
29
+@click.option(
30
+    '--site',
31
+    '-s',
32
+    help='Specify the site for which manifests to be generated')
33
+@click.option(
34
+    '--type', '-t', help='Specify the plugin type formation or tugboat')
35
+@click.option('--formation_url', '-f', help='Specify the formation url')
36
+@click.option('--formation_user', '-u', help='Specify the formation user id')
37
+@click.option(
38
+    '--formation_password', '-p', help='Specify the formation user password')
39
+@click.option(
40
+    '--intermediary',
41
+    '-i',
42
+    type=click.Path(exists=True),
43
+    help=
44
+    'Intermediary file path to generate manifests, use -m also with this option')
45
+@click.option(
46
+    '--additional_config',
47
+    '-d',
48
+    type=click.Path(exists=True),
49
+    help='Site-specific configuration details')
50
+@click.option(
51
+    '--generate_intermediary',
52
+    '-g',
53
+    is_flag=True,
54
+    help='Dump intermediary file from passed excel and excel spec')
55
+@click.option(
56
+    '--intermediary_dir',
57
+    '-idir',
58
+    type=click.Path(exists=True),
59
+    help='The path where intermediary file needs to be generated')
60
+@click.option(
61
+    '--generate_manifests',
62
+    '-m',
63
+    is_flag=True,
64
+    help='Generate manifests from the generated intermediary file')
65
+@click.option(
66
+    '--manifest_dir',
67
+    '-mdir',
68
+    type=click.Path(exists=True),
69
+    help='The path where manifest files need to be generated')
70
+@click.option(
71
+    '--loglevel',
72
+    '-l',
73
+    default=20,
74
+    multiple=False,
75
+    show_default=True,
76
+    help='Loglevel NOTSET:0, DEBUG:10, \
77
+    INFO:20, WARNING:30, ERROR:40, CRITICAL:50')
78
+def main(*args, **kwargs):
79
+    # Extract user provided inputs
80
+    generate_intermediary = kwargs['generate_intermediary']
81
+    intermediary_dir = kwargs['intermediary_dir']
82
+    generate_manifests = kwargs['generate_manifests']
83
+    manifest_dir = kwargs['manifest_dir']
84
+    intermediary = kwargs['intermediary']
85
+    site = kwargs['site']
86
+    loglevel = kwargs['loglevel']
87
+
88
+    # Set Logging format
89
+    LOG.setLevel(loglevel)
90
+    stream_handle = logging.StreamHandler()
91
+    formatter = logging.Formatter(
92
+        '(%(name)s): %(asctime)s %(levelname)s %(message)s')
93
+    stream_handle.setFormatter(formatter)
94
+    LOG.addHandler(stream_handle)
95
+
96
+    LOG.info("Spyglass start")
97
+    LOG.debug("CLI Parameters passed:\n{}".format(kwargs))
98
+
99
+    if not (generate_intermediary or generate_manifests):
100
+        LOG.error("Invalid CLI parameters passed. Spyglass exiting")
101
+        LOG.error("One of the options -m/-g is mandatory")
102
+        LOG.info("CLI Parameters:\n{}".format(kwargs))
103
+        exit()
104
+
105
+    # Generate Intermediary yaml and manifests extracting data
106
+    # from data source specified by plugin type
107
+    intermediary_yaml = {}
108
+    if intermediary is None:
109
+        LOG.info("Generating Intermediary yaml")
110
+        plugin_type = kwargs.get('type', None)
111
+        plugin_class = None
112
+
113
+        # Discover the plugin and load the plugin class
114
+        LOG.info("Load the plugin class")
115
+        for entry_point in pkg_resources.iter_entry_points(
116
+                'data_extractor_plugins'):
117
+            if entry_point.name == plugin_type:
118
+                plugin_class = entry_point.load()
119
+
120
+        if plugin_class is None:
121
+            LOG.error(
122
+                "Unsupported Plugin type. Plugin type:{}".format(plugin_type))
123
+            exit()
124
+
125
+        # Extract data from plugin data source
126
+        LOG.info("Extract data from plugin data source")
127
+        data_extractor = plugin_class(site)
128
+        plugin_conf = data_extractor.get_plugin_conf(kwargs)
129
+        data_extractor.set_config_opts(plugin_conf)
130
+        data_extractor.extract_data()
131
+
132
+        # Apply any additional_config provided by user
133
+        additional_config = kwargs.get('additional_config', None)
134
+        if additional_config is not None:
135
+            with open(additional_config, 'r') as config:
136
+                raw_data = config.read()
137
+                additional_config_data = yaml.safe_load(raw_data)
138
+            LOG.debug("Additional config data:\n{}".format(
139
+                pprint.pformat(additional_config_data)))
140
+
141
+            LOG.info("Apply additional configuration from:{}".format(
142
+                additional_config))
143
+            data_extractor.apply_additional_data(additional_config_data)
144
+            LOG.debug(pprint.pformat(data_extractor.site_data))
145
+
146
+        # Apply design rules to the data
147
+        LOG.info("Apply design rules to the extracted data")
148
+        process_input_ob = ProcessDataSource(site)
149
+        process_input_ob.load_extracted_data_from_data_source(
150
+            data_extractor.site_data)
151
+
152
+        LOG.info("Generate intermediary yaml")
153
+        intermediary_yaml = process_input_ob.generate_intermediary_yaml()
154
+    else:
155
+        LOG.info("Loading intermediary from user provided input")
156
+        with open(intermediary, 'r') as intermediary_file:
157
+            raw_data = intermediary_file.read()
158
+            intermediary_yaml = yaml.safe_load(raw_data)
159
+
160
+    if generate_intermediary and intermediary is None:
161
+        process_input_ob.dump_intermediary_file(intermediary_dir)
162
+
163
+    if generate_manifests:
164
+        LOG.info("Generating site Manifests")
165
+        processor_engine = SiteProcessor(intermediary_yaml, manifest_dir)
166
+        processor_engine.render_template()
167
+
168
+    LOG.info("Spyglass Execution Completed")
169
+
170
+
171
+if __name__ == '__main__':
172
+    main()
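Because the entry point is a click command, it can also be exercised in-process; a hedged sketch using click's test runner, where the site name and plugin type are hypothetical values:

    from click.testing import CliRunner

    from spyglass.spyglass import main

    runner = CliRunner()
    # Hypothetical invocation: generate only the intermediary for a site
    # named 'site1' using the 'tugboat' plugin type.
    result = runner.invoke(
        main, ['-g', '-t', 'tugboat', '-s', 'site1', '-idir', '.'])
    print(result.exit_code)
    print(result.output)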

+ 0
- 0
spyglass/utils/__init__.py View File


+ 41
- 0
spyglass/utils/utils.py View File

@@ -0,0 +1,41 @@
1
+# Copyright 2018 AT&T Intellectual Property.  All other rights reserved.
2
+#
3
+# Licensed under the Apache License, Version 2.0 (the 'License');
4
+# you may not use this file except in compliance with the License.
5
+# You may obtain a copy of the License at
6
+#
7
+# http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an 'AS IS' BASIS,
11
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+# See the License for the specific language governing permissions and
13
+# limitations under the License.
14
+
15
+
16
+# Merge two dictionaries
17
+def dict_merge(dictA, dictB, path=None):
18
+    """ Recursively merge dictionary dictB into dictA
19
+
20
+
21
+    DictA represents the data extracted by a plugin and DictB
22
+    represents the additional site config dictionary that is passed
23
+    to CLI. The merge process compares the dictionary keys and if they
24
+    are the same and the values they point to are different, then
25
+    dictB object's value is copied to dictA. If a key is unique
26
+    to dictB, then it is copied to dictA.
27
+    """
28
+    if path is None:
29
+        path = []
30
+
31
+    for key in dictB:
32
+        if key in dictA:
33
+            if isinstance(dictA[key], dict) and isinstance(dictB[key], dict):
34
+                dict_merge(dictA[key], dictB[key], path + [str(key)])
35
+            elif dictA[key] == dictB[key]:
36
+                pass  # values are same, so no processing here
37
+            else:
38
+                dictA[key] = dictB[key]
39
+        else:
40
+            dictA[key] = dictB[key]
41
+    return dictA
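A short usage sketch showing the precedence dict_merge gives to dictB (the additional site-config overrides); the dictionaries are hypothetical:

    from spyglass.utils.utils import dict_merge

    # dictA: data extracted by a plugin; dictB: overrides supplied via CLI.
    plugin_data = {'site_info': {'domain': 'example.com', 'sitetype': 'foundry'}}
    overrides = {'site_info': {'domain': 'prod.example.com'},
                 'storage': {'ceph': {'osd_count': 6}}}

    merged = dict_merge(plugin_data, overrides)
    # merged['site_info']['domain']   -> 'prod.example.com' (dictB wins)
    # merged['site_info']['sitetype'] -> 'foundry' (kept from dictA)
    # merged['storage']               -> copied over, since the key only exists in dictB
    print(merged)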

+ 21
- 0
tools/spyglass.sh View File

@@ -0,0 +1,21 @@
1
+#!/usr/bin/env bash
2
+
3
+set -e
4
+
5
+: ${WORKSPACE:=$(pwd)}
6
+: ${IMAGE:=quay.io/att-comdev/spyglass:latest}
7
+
8
+echo
9
+echo "== NOTE: Workspace $WORKSPACE is the execution directory in the container =="
10
+echo
11
+
12
+# Working directory inside container to execute commands from and mount from
13
+# host OS
14
+container_workspace_path='/var/spyglass'
15
+
16
+docker run --rm -t \
17
+    --net=none \
18
+    --workdir="$container_workspace_path" \
19
+    -v "${WORKSPACE}:$container_workspace_path" \
20
+    "${IMAGE}" \
21
+    spyglass "${@}"
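As a hypothetical example, running ./tools/spyglass.sh --help from a checkout mounts the current working directory into the container at /var/spyglass and forwards the remaining arguments unchanged to the spyglass CLI; note that --net=none means the container runs without network access.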

+ 45
- 0
tox.ini View File

@@ -0,0 +1,45 @@
1
+[tox]
2
+envlist = py35, py36, pep8, docs
3
+skipsdist = True
4
+
5
+[testenv]
6
+deps =
7
+  -r{toxinidir}/requirements.txt
8
+basepython=python3
9
+whitelist_externals =
10
+  find
11
+commands =
12
+  find . -type f -name "*.pyc" -delete
13
+  pytest \
14
+    {posargs}
15
+
16
+[testenv:fmt]
17
+deps = yapf
18
+commands =
19
+    yapf --style=pep8 -ir {toxinidir}/spyglass {toxinidir}/tests
20
+
21
+[testenv:pep8]
22
+deps = 
23
+    yapf
24
+    flake8
25
+commands =
26
+    #yapf --style=.style.yapf -rd {toxinidir}/spyglass
27
+    flake8 {toxinidir}/spyglass
28
+
29
+[testenv:bandit]
30
+deps =
31
+    bandit
32
+commands = bandit -r spyglass -n 5
33
+
34
+[flake8]
35
+ignore = E125,E251,W503
36
+
37
+[testenv:docs]
38
+basepython = python3
39
+deps =
40
+    -r{toxinidir}/requirements.txt
41
+    -r{toxinidir}/doc/requirements.txt
42
+commands =
43
+    rm -rf doc/build
44
+    sphinx-build -b html doc/source doc/build -n -W -v
45
+whitelist_externals = rm
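For example, tox -e pep8 runs the flake8 checks defined above and tox -e fmt applies yapf formatting in place; both assume tox is installed on the host.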
