Browse Source

RPC layer

Change-Id: I397a9b51c99ce7e9326edccb6686f01f6b8df144
Kanagaraj Manickam 3 years ago
parent
commit
cc009a8e40

+ 12
- 1
etc/namos.conf View File

@@ -1,2 +1,13 @@
1
+[DEFAULT]
2
+rpc_backend = rabbit
3
+
4
+[oslo_messaging_rabbit]
5
+rabbit_userid = stackrabbit
6
+rabbit_password = password
7
+rabbit_hosts = 172.241.0.101
8
+
1 9
 [database]
2
-connection = mysql+pymysql://root:password@172.241.0.101/namos?charset=utf8
10
+connection = mysql+pymysql://root:password@172.241.0.101/namos?charset=utf8
11
+
12
+[conductor]
13
+enabled_services=namos

+ 62
- 0
namos/cmd/conductor.py View File

@@ -0,0 +1,62 @@
1
+# -*- encoding: utf-8 -*-
2
+#
3
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
4
+# All Rights Reserved.
5
+#
6
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
7
+#    not use this file except in compliance with the License. You may obtain
8
+#    a copy of the License at
9
+#
10
+#         http://www.apache.org/licenses/LICENSE-2.0
11
+#
12
+#    Unless required by applicable law or agreed to in writing, software
13
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
15
+#    License for the specific language governing permissions and limitations
16
+#    under the License.
17
+
18
+"""
19
+The Namos Infra Management Service
20
+"""
21
+
22
+import eventlet
23
+eventlet.monkey_patch()
24
+
25
+from oslo_config import cfg
26
+from oslo_log import log
27
+import oslo_messaging
28
+from oslo_service import service as os_service
29
+
30
+from namos.common import config
31
+from namos.common import service
32
+from namos.conductor import manager
33
+
34
+
35
+CONF = cfg.CONF
36
+CMD_NAME = 'namos-manager'
37
+LOG = log.getLogger(__name__)
38
+
39
+
40
+def main():
41
+    config.init_log()
42
+    config.init_conf(CMD_NAME)
43
+
44
+    from namos import conductor  # noqa
45
+    mgr = service.RPCService(CONF.conductor.host,
46
+                             config.PROJECT_NAME,
47
+                             manager.ConductorManager())
48
+    enabled_services = CONF.conductor.enabled_services
49
+
50
+    launcher = os_service.ProcessLauncher(CONF)
51
+    for srv in enabled_services.split(','):
52
+        LOG.info('Starting conductor for %s', srv)
53
+        oslo_messaging.set_transport_defaults(srv)
54
+        launcher.launch_service(mgr, CONF.conductor.workers)
55
+
56
+    # namos.register_myself()
57
+    # TODO(mrkanag) Namos is not registering the RPC backend, fix it !
58
+    launcher.wait()
59
+
60
+
61
+if __name__ == '__main__':
62
+    main()

+ 22
- 3
namos/common/config.py View File

@@ -19,18 +19,37 @@ import namos
19 19
 
20 20
 PROJECT_NAME = 'namos'
21 21
 VERSION = namos.__version__
22
-
22
+MESSAGE_QUEUE_CONDUCTOR_TOPIC = '%s.conductor' % PROJECT_NAME
23 23
 CONF = cfg.CONF
24 24
 
25 25
 
26
+conductor_opts = [
27
+    cfg.IntOpt('workers',
28
+               default=1,
29
+               help='Number of workers for conductor service. A single '
30
+                    'conductor is enabled by default.'),
31
+    cfg.StrOpt('enabled_services',
32
+               default='namos,cinder,nova,keystone,horizon,heat,'
33
+                       'neutron,glance,swift,trove',
34
+               help='List of service exchanges to listen for'),
35
+    cfg.StrOpt('host',
36
+               default='namos-dev',
37
+               help='conductor host name'),
38
+]
39
+
40
+
41
+def register_conductor_opts():
42
+    CONF.register_opts(conductor_opts, 'conductor')
43
+
44
+
26 45
 def init_conf(prog):
27 46
     CONF(project=PROJECT_NAME,
28 47
          version=VERSION,
29 48
          prog=prog)
30 49
 
31 50
 
32
-def setup_log(prog=PROJECT_NAME):
51
+def init_log(project=PROJECT_NAME):
33 52
     logging.register_options(cfg.CONF)
34 53
     logging.setup(cfg.CONF,
35
-                  prog,
54
+                  project,
36 55
                   version=VERSION)

+ 113
- 0
namos/common/messaging.py View File

@@ -0,0 +1,113 @@
1
+# -*- coding: utf-8 -*-
2
+
3
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
4
+# not use this file except in compliance with the License. You may obtain
5
+# a copy of the License at
6
+#
7
+#      http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12
+# License for the specific language governing permissions and limitations
13
+# under the License.
14
+
15
+from oslo_config import cfg
16
+from oslo_context import context
17
+import oslo_messaging
18
+from oslo_serialization import jsonutils
19
+
20
+
21
+DEFAULT_URL = "__default__"
22
+TRANSPORTS = {}
23
+
24
+_ALIASES = {
25
+    'namos.openstack.common.rpc.impl_kombu': 'rabbit',
26
+    'namos.openstack.common.rpc.impl_qpid': 'qpid',
27
+    'namos.openstack.common.rpc.impl_zmq': 'zmq',
28
+}
29
+
30
+
31
+class RequestContextSerializer(oslo_messaging.Serializer):
32
+    def __init__(self, base):
33
+        self._base = base
34
+
35
+    def serialize_entity(self, ctxt, entity):
36
+        if not self._base:
37
+            return entity
38
+        return self._base.serialize_entity(ctxt, entity)
39
+
40
+    def deserialize_entity(self, ctxt, entity):
41
+        if not self._base:
42
+            return entity
43
+        return self._base.deserialize_entity(ctxt, entity)
44
+
45
+    @staticmethod
46
+    def serialize_context(ctxt):
47
+        return ctxt.to_dict()
48
+
49
+    @staticmethod
50
+    def deserialize_context(ctxt):
51
+        return context.RequestContext(ctxt)
52
+
53
+
54
+class JsonPayloadSerializer(oslo_messaging.NoOpSerializer):
55
+    @classmethod
56
+    def serialize_entity(cls, context, entity):
57
+        return jsonutils.to_primitive(entity, convert_instances=True)
58
+
59
+
60
+def get_transport(url=None, optional=False, cache=True):
61
+    """Initialise the olso.messaging layer."""
62
+    global TRANSPORTS, DEFAULT_URL
63
+    cache_key = url or DEFAULT_URL
64
+    transport = TRANSPORTS.get(cache_key)
65
+    if not transport or not cache:
66
+        try:
67
+            transport = oslo_messaging.get_transport(cfg.CONF, url,
68
+                                                     aliases=_ALIASES)
69
+        except oslo_messaging.InvalidTransportURL as e:
70
+            if not optional or e.url:
71
+                # NOTE(sileht): olso.messaging is configured but unloadable
72
+                # so reraise the exception
73
+                raise
74
+            return None
75
+        else:
76
+            if cache:
77
+                TRANSPORTS[cache_key] = transport
78
+    return transport
79
+
80
+
81
+def get_rpc_server(host, exchange, topic, version, endpoint):
82
+    """Return a configured olso.messaging rpc server."""
83
+    oslo_messaging.set_transport_defaults(exchange)
84
+    target = oslo_messaging.Target(server=host,
85
+                                   topic=topic,
86
+                                   version=version)
87
+    serializer = RequestContextSerializer(JsonPayloadSerializer())
88
+    transport = get_transport(optional=True)
89
+    return oslo_messaging.get_rpc_server(transport, target,
90
+                                         [endpoint], executor='eventlet',
91
+                                         serializer=serializer)
92
+
93
+
94
+def get_rpc_client(topic, exchange, version, retry=None, **kwargs):
95
+    """Return a configured olso.messaging RPCClient."""
96
+    oslo_messaging.set_transport_defaults(exchange)
97
+    target = oslo_messaging.Target(version=version,
98
+                                   topic=topic, **kwargs)
99
+    serializer = RequestContextSerializer(JsonPayloadSerializer())
100
+    transport = get_transport(optional=True)
101
+    return oslo_messaging.RPCClient(transport, target,
102
+                                    serializer=serializer,
103
+                                    retry=retry,
104
+                                    version_cap=version)
105
+
106
+
107
+def cleanup():
108
+    """Cleanup the olso.messaging layer."""
109
+    global TRANSPORTS
110
+
111
+    for url in TRANSPORTS:
112
+        TRANSPORTS[url].cleanup()
113
+        del TRANSPORTS[url]

+ 63
- 0
namos/common/service.py View File

@@ -0,0 +1,63 @@
1
+# -*- coding: utf-8 -*-
2
+
3
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
4
+# not use this file except in compliance with the License. You may obtain
5
+# a copy of the License at
6
+#
7
+#      http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12
+# License for the specific language governing permissions and limitations
13
+# under the License.
14
+
15
+import eventlet
16
+eventlet.monkey_patch()
17
+
18
+from oslo_log import log
19
+from oslo_service import service
20
+
21
+from namos.common import messaging as rpc
22
+
23
+LOG = log.getLogger(__name__)
24
+
25
+
26
+class RPCService(service.Service):
27
+
28
+    def __init__(self,
29
+                 host,
30
+                 exchange,
31
+                 srv):
32
+        super(RPCService, self).__init__()
33
+        self.manager = srv
34
+
35
+        self.host = host
36
+        self.exchange = exchange
37
+        self.version = self.manager.RPC_API_VERSION
38
+        self.topic = self.manager.TOPIC
39
+        self.rpcserver = None
40
+
41
+    def start(self):
42
+        super(RPCService, self).start()
43
+
44
+        self.rpcserver = rpc.get_rpc_server(host=self.host,
45
+                                            topic=self.topic,
46
+                                            version=self.version,
47
+                                            endpoint=self.manager,
48
+                                            exchange=self.exchange)
49
+        self.rpcserver.start()
50
+        LOG.info(('Created RPC server for service %(service)s on host '
51
+                  '%(host)s.', {'service': self.topic, 'host': self.host}))
52
+
53
+    def stop(self):
54
+        super(RPCService, self).stop()
55
+        try:
56
+            self.rpcserver.stop()
57
+            self.rpcserver.wait()
58
+        except Exception as e:
59
+            LOG.exception(('Service error occurred when stopping the '
60
+                           'RPC server. Error: %s', e))
61
+
62
+        LOG.info(('Stopped RPC server for service %(service)s on host '
63
+                  '%(host)s.', {'service': self.topic, 'host': self.host}))

+ 16
- 0
namos/conductor/__init__.py View File

@@ -0,0 +1,16 @@
1
+# -*- coding: utf-8 -*-
2
+
3
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
4
+# not use this file except in compliance with the License. You may obtain
5
+# a copy of the License at
6
+#
7
+#      http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12
+# License for the specific language governing permissions and limitations
13
+# under the License.
14
+
15
+from namos.common import config as namos_config
16
+namos_config.register_conductor_opts()

+ 450
- 0
namos/conductor/manager.py View File

@@ -0,0 +1,450 @@
1
+# -*- coding: utf-8 -*-
2
+
3
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
4
+# not use this file except in compliance with the License. You may obtain
5
+# a copy of the License at
6
+#
7
+#      http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12
+# License for the specific language governing permissions and limitations
13
+# under the License.
14
+
15
+import functools
16
+
17
+from oslo_config import cfg
18
+from oslo_context import context
19
+from oslo_log import log
20
+
21
+from namos.common import config
22
+from namos.common import exception
23
+from namos.db import api as db_api
24
+from namos.db import openstack_drivers
25
+
26
+LOG = log.getLogger(__name__)
27
+
28
+config.register_conductor_opts()
29
+
30
+CONF = cfg.CONF
31
+
32
+
33
+def request_context(func):
34
+    @functools.wraps(func)
35
+    def wrapped(self, ctx, *args, **kwargs):
36
+        if ctx is not None and not isinstance(ctx, context.RequestContext):
37
+            ctx = context.RequestContext.from_dict(ctx.to_dict())
38
+
39
+        return func(self, ctx, *args, **kwargs)
40
+
41
+    return wrapped
42
+
43
+
44
+class ConductorManager(object):
45
+    RPC_API_VERSION = '1.0'
46
+    TOPIC = config.MESSAGE_QUEUE_CONDUCTOR_TOPIC
47
+
48
+    @request_context
49
+    def add_region(self, context, region):
50
+        # Move this try except to wrpper fn of the db layer
51
+        try:
52
+            db_api.region_create(context, region)
53
+        except Exception as e:
54
+            raise exception.NamosException(e)
55
+
56
+    @request_context
57
+    def region_get_all(self, context):
58
+        return db_api.region_get_all(context)
59
+
60
+    @request_context
61
+    def register_myself(self, context, registration_info):
62
+        LOG.info("REGISTERING %s.%s" % (registration_info['project_name'],
63
+                                        registration_info['prog_name']))
64
+
65
+        # Service processing
66
+        sp = ServiceProcessor(registration_info)
67
+        service_worker_id = sp.process_service(context)
68
+
69
+        #  Device Driver processing
70
+        dp = DriverProcessor(service_worker_id,
71
+                             registration_info['config_dict'])
72
+        dp.process_drivers(context)
73
+
74
+        return service_worker_id
75
+
76
+    @request_context
77
+    def service_perspective_get(self,
78
+                                context,
79
+                                service_id,
80
+                                include_details=False):
81
+        return db_api.service_perspective_get(context,
82
+                                              service_id,
83
+                                              include_details)
84
+
85
+    @request_context
86
+    def device_perspective_get(self,
87
+                               context,
88
+                               device_id,
89
+                               include_details=False):
90
+        return db_api.device_perspective_get(context,
91
+                                             device_id,
92
+                                             include_details)
93
+
94
+    @request_context
95
+    def region_perspective_get(self,
96
+                               context,
97
+                               region_id,
98
+                               include_details=False):
99
+        return db_api.region_perspective_get(context,
100
+                                             region_id,
101
+                                             include_details)
102
+
103
+    @request_context
104
+    def infra_perspective_get(self, context):
105
+        return db_api.infra_perspective_get(context)
106
+
107
+
108
+class ServiceProcessor(object):
109
+    def __init__(self, registration_info):
110
+        self.registration_info = registration_info
111
+
112
+    def process_service(self, context):
113
+        # Service Node
114
+        try:
115
+            # TODO(mrkanag) is this to be region specifc search
116
+            node = db_api.service_node_get_by_name(
117
+                context,
118
+                self.registration_info.get('fqdn'))
119
+            LOG.info('Service node %s is existing' % node)
120
+        except exception.ServiceNodeNotFound:
121
+            # TODO(mrkanag) region_id is hard-coded, fix it !
122
+            # user proper node name instead of fqdn
123
+            node = db_api.service_node_create(
124
+                context,
125
+                dict(name=self.registration_info.get('fqdn'),
126
+                     fqdn=self.registration_info.get('fqdn'),
127
+                     region_id='f7dcd175-27ef-46b5-997f-e6e572f320b0'))
128
+
129
+            LOG.info('Service node %s is created' % node)
130
+
131
+        # Service
132
+        try:
133
+            service = db_api.service_get_by_name(
134
+                context,
135
+                self.registration_info.get('project_name'))
136
+            LOG.info('Service %s is existing' % service)
137
+        except exception.ServiceNotFound:
138
+            s_id = 'b9c2549f-f685-4bc2-92e9-ba8af9c18591'
139
+            service = db_api.service_create(
140
+                context,
141
+                # TODO(mrkanag) use keystone python client and
142
+                # use real service id here
143
+                dict(name=self.registration_info.get('project_name'),
144
+                     keystone_service_id=s_id))
145
+
146
+            LOG.info('Service %s is created' % service)
147
+
148
+        # Service Component
149
+        service_components = \
150
+            db_api.service_component_get_all_by_node_for_service(
151
+                context,
152
+                node_id=node.id,
153
+                service_id=service.id,
154
+                name=self.registration_info['prog_name']
155
+            )
156
+        if len(service_components) == 1:
157
+            service_component = service_components[0]
158
+            LOG.info('Service Component %s is existing' % service_component)
159
+        # TODO(mrkanag) what to do when service_components size is > 1
160
+        else:
161
+            service_component = db_api.service_component_create(
162
+                context,
163
+                dict(name=self.registration_info['prog_name'],
164
+                     node_id=node.id,
165
+                     service_id=service.id))
166
+            LOG.info('Service Component %s is created' % service_component)
167
+
168
+        # Service Worker
169
+        # TODO(mrkanag) Find a way to purge the dead service worker
170
+        # Once each service  is enabled with heart beating namos
171
+        # purging can be done once heart beat stopped. this can be
172
+        # done from openstack.common.service.py
173
+        service_workers = \
174
+            db_api.service_worker_get_by_host_for_service_component(
175
+                context,
176
+                service_component_id=service_component.id,
177
+                host=self.registration_info['host']
178
+            )
179
+        if len(service_workers) == 1:
180
+            service_worker = \
181
+                db_api.service_worker_update(
182
+                    context,
183
+                    service_workers[0].id,
184
+                    dict(
185
+                        pid=self.registration_info['pid']))
186
+            LOG.info('Service Worker %s is existing and is updated'
187
+                     % service_worker)
188
+
189
+        # TODO(mrkanag) what to do when service_workers size is > 1
190
+        else:
191
+            service_worker = db_api.service_worker_create(
192
+                context,
193
+                # TODO(mrkanag) Fix the name, device driver proper !
194
+                dict(name='%s@%s' % (self.registration_info['pid'],
195
+                                     service_component.name),
196
+                     pid=self.registration_info['pid'],
197
+                     host=self.registration_info['host'],
198
+                     service_component_id=service_component.id))
199
+            LOG.info('Service Worker %s is created' % service_worker)
200
+
201
+        # Config
202
+        # TODO(mrkanag) Optimize the config like per service_component
203
+        # or per service_worker,
204
+        for cfg_name, cfg_obj in self.registration_info[
205
+            'config_dict'].iteritems():
206
+            cfg_obj['service_worker_id'] = service_worker.id
207
+            configs = db_api.config_get_by_name_for_service_worker(
208
+                context,
209
+                service_worker_id=cfg_obj['service_worker_id'],
210
+                name=cfg_obj['name'])
211
+            if len(configs) == 1:
212
+                config = db_api.config_update(context,
213
+                                              configs[0].id,
214
+                                              cfg_obj)
215
+                LOG.info("Config %s is existing and is updated" % config)
216
+            else:
217
+                config = db_api.config_create(context, cfg_obj)
218
+                LOG.info("Config %s is created" % config)
219
+
220
+        return service_worker.id
221
+
222
+
223
+class DriverProcessor(object):
224
+    def __init__(self, service_worker_id, config_dict):
225
+        self.config_dict = config_dict
226
+        self.service_worker_id = service_worker_id
227
+
228
+    def _identify_drivers(self):
229
+        return (set(openstack_drivers.get_drivers_config().keys()) &
230
+                set(self.config_dict.keys()))
231
+
232
+    def _get_value(self, name):
233
+        if name is None:
234
+            return name
235
+
236
+        if isinstance(name, str):
237
+            # Constant naming
238
+            if name[0] == '#':
239
+                return name[1:]
240
+            return (self.config_dict[name].get('value') or
241
+                    self.config_dict[name].get('default_value'))
242
+        elif isinstance(name, tuple):
243
+            fn = name[0]
244
+            args = list()
245
+            for var in name[1:]:
246
+                args.append(self._get_value(var))
247
+            return fn(*args)
248
+        elif isinstance(name, list):
249
+            fmt_str = name[0]
250
+            params = [self._get_value(param) for param in name[1:]]
251
+            return fmt_str % tuple(params)
252
+
253
+    @staticmethod
254
+    def _to_list(list_in_str):
255
+        def strip_out(s):
256
+            start_idx = 0
257
+            end_idx = len(s)
258
+            if s[start_idx] == '[' \
259
+                    or s[start_idx] == '\'' \
260
+                    or s[start_idx] == '"':
261
+                start_idx += 1
262
+            if s[end_idx - 1] == ']' \
263
+                    or s[end_idx - 1] == '\'' \
264
+                    or s[end_idx - 1] == '"':
265
+                end_idx -= 1
266
+            return s[start_idx:end_idx]
267
+
268
+        l = []
269
+        for s in strip_out(list_in_str.strip()).split(','):
270
+            s = str(strip_out(s.strip()))
271
+            l.append(s)
272
+
273
+        return l
274
+
275
+    def process_drivers(self, context):
276
+        for driver_key in self._identify_drivers():
277
+            drivers = self._get_value(driver_key)
278
+            drivers = DriverProcessor._to_list(drivers)
279
+            for driver_name in drivers:
280
+                self.process_driver(context, driver_key, driver_name)
281
+
282
+    def process_driver(self, context, driver_key, driver_name):
283
+            driver_config = \
284
+                openstack_drivers.get_drivers_config()[driver_key][driver_name]
285
+
286
+            if driver_config.get('alias') is not None:
287
+                alias = driver_config.get('alias')
288
+                driver_config = \
289
+                    openstack_drivers.get_drivers_config()
290
+                for key in alias.split(':'):
291
+                    driver_config = driver_config[key]
292
+                driver_name = key
293
+
294
+            driver_def = \
295
+                openstack_drivers.get_drivers_def()[driver_name]
296
+
297
+            connection = dict()
298
+
299
+            endpoint_type = None
300
+            connection_cfg = None
301
+            device_endpoint_name = None
302
+            device_cfg = None
303
+            child_device_cfg = None
304
+
305
+            if driver_config.get('device') is not None:
306
+                device_cfg = driver_config['device']
307
+
308
+            if driver_config['endpoint'].get('type') is not None:
309
+                endpoint_type = driver_config['endpoint']['type']
310
+                if endpoint_type[0] != '#':
311
+                    endpoint_type = self._get_value(endpoint_type)
312
+
313
+                connection_cfg = driver_config['endpoint'][endpoint_type][
314
+                    'connection']
315
+                device_endpoint_name = self._get_value(
316
+                    driver_config['endpoint'][endpoint_type]['name'])
317
+                # override the device name
318
+                if driver_config['endpoint'][endpoint_type].get(
319
+                        'device') is not None:
320
+                    device_cfg = driver_config['endpoint'][endpoint_type][
321
+                        'device']
322
+                if driver_config['endpoint'][endpoint_type].get(
323
+                        'child_device') is not None:
324
+                    child_device_cfg = driver_config['endpoint'][
325
+                        endpoint_type]['child_device']
326
+            else:
327
+                endpoint_type = None
328
+                connection_cfg = driver_config['endpoint']['connection']
329
+                device_endpoint_name = self._get_value(
330
+                    driver_config['endpoint']['name']
331
+                )
332
+                # override the device name
333
+                if driver_config['endpoint'].get('device') is not None:
334
+                    device_cfg = driver_config['endpoint']['device']
335
+
336
+                if driver_config['endpoint'].get('child_device') is not None:
337
+                    child_device_cfg = driver_config['endpoint'][
338
+                        'child_device']
339
+
340
+            # Device
341
+            device_name = self._get_value(device_cfg['name'])
342
+            try:
343
+                device = db_api.device_get_by_name(
344
+                    context,
345
+                    device_name)
346
+                LOG.info('Device %s is existing' % device)
347
+            except exception.DeviceNotFound:
348
+                # TODO(mrkanag) region_id is hard-coded, fix it !
349
+                # Set the right status as well
350
+                device = db_api.device_create(
351
+                    context,
352
+                    dict(name=device_name,
353
+                         status='active',
354
+                         region_id='f7dcd175-27ef-46b5-997f-e6e572f320b0'))
355
+
356
+                LOG.info('Device %s is created' % device)
357
+
358
+            # Handle child devices
359
+            if child_device_cfg is not None:
360
+                for d_name in self._get_value(child_device_cfg['key']):
361
+                    base_name = self._get_value(child_device_cfg['base_name'])
362
+                    d_name = '%s-%s' % (base_name, d_name)
363
+                    try:
364
+                        device = db_api.device_get_by_name(
365
+                            context,
366
+                            d_name)
367
+                        LOG.info('Device %s is existing' % device)
368
+                    except exception.DeviceNotFound:
369
+                        # TODO(mrkanag) region_id is hard-coded, fix it !
370
+                        # Set the right status as well
371
+                        r_id = 'f7dcd175-27ef-46b5-997f-e6e572f320b0'
372
+                        device = db_api.device_create(
373
+                            context,
374
+                            dict(name=d_name,
375
+                                 status='active',
376
+                                 parent_id=device.id,
377
+                                 region_id=r_id))
378
+
379
+                LOG.info('Device %s is created' % device)
380
+
381
+            # Device Endpoint
382
+            device_endpoints = db_api.device_endpoint_get_by_device_type(
383
+                context,
384
+                device_id=device.id,
385
+                type=endpoint_type,
386
+                name=device_endpoint_name)
387
+            if len(device_endpoints) >= 1:
388
+                device_endpoint = device_endpoints[0]
389
+                LOG.info('Device Endpoint %s is existing' %
390
+                         device_endpoints[0])
391
+            else:
392
+                for k, v in connection_cfg.iteritems():
393
+                    connection[k] = self._get_value(k)
394
+
395
+                device_endpoint = db_api.device_endpoint_create(
396
+                    context,
397
+                    dict(name=device_endpoint_name,
398
+                         connection=connection,
399
+                         type=endpoint_type,
400
+                         device_id=device.id))
401
+                LOG.info('Device Endpoint %s is created' % device_endpoint)
402
+
403
+            # Device Driver Class
404
+            try:
405
+                device_driver_class = db_api.device_driver_class_get_by_name(
406
+                    context,
407
+                    driver_name)
408
+                LOG.info('Device Driver Class %s is existing' %
409
+                         device_driver_class)
410
+            except exception.DeviceDriverClassNotFound:
411
+                device_driver_class = db_api.device_driver_class_create(
412
+                    context,
413
+                    dict(name=driver_name,
414
+                         python_class=driver_name,
415
+                         type=driver_def['type'],
416
+                         device_id=device.id,
417
+                         endpoint_id=device_endpoint.id,
418
+                         service_worker_id=self.service_worker_id,
419
+                         extra=driver_def.get('extra')))
420
+                LOG.info('Device Driver Class %s is created' %
421
+                         device_driver_class)
422
+
423
+            # Device Driver
424
+            device_drivers = \
425
+                db_api.device_driver_get_by_device_endpoint_service_worker(
426
+                    context,
427
+                    device_id=device.id,
428
+                    endpoint_id=device_endpoint.id,
429
+                    device_driver_class_id=device_driver_class.id,
430
+                    service_worker_id=self.service_worker_id
431
+                )
432
+            if len(device_drivers) >= 1:
433
+                device_driver = device_drivers[0]
434
+                LOG.info('Device Driver %s is existing' %
435
+                         device_driver)
436
+            else:
437
+                device_driver = db_api.device_driver_create(
438
+                    context,
439
+                    dict(device_id=device.id,
440
+                         name=driver_name,
441
+                         endpoint_id=device_endpoint.id,
442
+                         device_driver_class_id=device_driver_class.id,
443
+                         service_worker_id=self.service_worker_id)
444
+                )
445
+                LOG.info('Device Driver %s is created' %
446
+                         device_driver)
447
+
448
+
449
+if __name__ == '__main__':
450
+    print (DriverProcessor(None, None)._to_list("[\"file\', \'http\']"))

+ 112
- 0
namos/conductor/rpcapi.py View File

@@ -0,0 +1,112 @@
1
+# -*- coding: utf-8 -*-
2
+
3
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
4
+# not use this file except in compliance with the License. You may obtain
5
+# a copy of the License at
6
+#
7
+#      http://www.apache.org/licenses/LICENSE-2.0
8
+#
9
+# Unless required by applicable law or agreed to in writing, software
10
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12
+# License for the specific language governing permissions and limitations
13
+# under the License.
14
+
15
+"""
16
+Client side of the conductor RPC API.
17
+"""
18
+import functools
19
+import json
20
+
21
+# import oslo_messaging
22
+from oslo_messaging import RemoteError
23
+
24
+from namos.common import config
25
+from namos.common import exception as namos_exception
26
+from namos.common import messaging as rpc
27
+
28
+
29
def wrapper_function(func):
    """Decorator translating RPC ``RemoteError`` into namos exceptions.

    The conductor serializes namos exceptions across RPC as
    ``oslo_messaging.RemoteError`` with the original class name in
    ``exc_type`` and the constructor kwargs JSON-encoded in ``value``.
    This decorator rebuilds the local namos exception so that callers of
    the RPC client can catch namos exception types directly.

    If the remote exception type is unknown to ``namos.common.exception``
    or its payload is not valid JSON, the original ``RemoteError`` is
    re-raised unchanged instead of being masked by an ``AttributeError``
    or ``ValueError`` from the translation itself.
    """
    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except RemoteError as e:
            # Only translate exception types namos actually defines.
            exception = getattr(namos_exception, e.exc_type, None)
            if exception is None:
                raise
            try:
                exc_kwargs = json.loads(e.value)
            except (TypeError, ValueError):
                # Payload is not the expected JSON kwargs dict.
                raise e
            raise exception(**exc_kwargs)

    return wrapped
40
+
41
+
42
class ConductorAPI(object):
    """Client side of the conductor RPC API.

    API version history:

        1.0 - Initial version.
    """

    RPC_API_VERSION = '1.0'

    def __init__(self):
        super(ConductorAPI, self).__init__()
        # The conductor service listens on the well-known namos topic
        # and exchange configured in namos.common.config.
        self.topic = config.MESSAGE_QUEUE_CONDUCTOR_TOPIC

        self.client = rpc.get_rpc_client(version=self.RPC_API_VERSION,
                                         topic=self.topic,
                                         exchange=config.PROJECT_NAME)

    @wrapper_function
    def add_region(self, context, region):
        """Register a region with the conductor.

        Returns the conductor's response (previously discarded), for
        consistency with the other RPC wrappers on this class.
        """
        return self.client.call(context, 'add_region', region=region)

    @wrapper_function
    def region_get_all(self, context):
        """Return all regions known to the conductor."""
        return self.client.call(context, 'region_get_all')

    @wrapper_function
    def service_perspective_get(self, context, service_id,
                                include_details=False):
        """Return the perspective (related entities) of one service."""
        return self.client.call(context,
                                'service_perspective_get',
                                service_id=service_id,
                                include_details=include_details)

    @wrapper_function
    def device_perspective_get(self, context, device_id,
                               include_details=False):
        """Return the perspective (related entities) of one device."""
        return self.client.call(context,
                                'device_perspective_get',
                                device_id=device_id,
                                include_details=include_details)

    @wrapper_function
    def region_perspective_get(self, context, region_id,
                               include_details=False):
        """Return the perspective (related entities) of one region."""
        return self.client.call(context,
                                'region_perspective_get',
                                region_id=region_id,
                                include_details=include_details)

    @wrapper_function
    def infra_perspective_get(self, context):
        """Return the infrastructure-wide perspective."""
        return self.client.call(context,
                                'infra_perspective_get')
98
+
99
if __name__ == '__main__':
    # Ad-hoc smoke test: push one sample region through the conductor RPC.
    config.init_log()
    config.init_conf('test-run')

    from oslo_context import context

    sample_region = {
        'name': 'RegionOne11',
        'keystone_region_id': 'region_one',
        'extra': {'location': 'bangalore'},
        'id': 'd7dcd175-27ef-46b5-997f-e6e572f320af',
    }
    conductor_api = ConductorAPI()
    conductor_api.add_region(context.RequestContext(), sample_region)

+ 5
- 0
requirements.txt View File

@@ -13,3 +13,8 @@ SQLAlchemy<1.1.0,>=1.0.10 # MIT
13 13
 sqlalchemy-migrate>=0.9.6 # Apache-2.0
14 14
 PyMySQL
15 15
 # RPC service layer
16
+oslo.context>=0.2.0 # Apache-2.0
17
+oslo.serialization>=1.10.0 # Apache-2.0
18
+oslo.messaging>=4.0.0 # Apache-2.0
19
+oslo.service>=1.0.0 # Apache-2.0
20
+oslo.utils>=3.5.0 # Apache-2.0

Loading…
Cancel
Save