Applied cookiecutter template
Change-Id: Id860d5c4c97ae02c361c06e407a2e11cb3379783
parent 7e5ced270f
commit 7fd6112605

.gitreview (new file, 4 lines)
@@ -0,0 +1,4 @@
[gerrit]
host=review.openstack.org
port=29418
project=openstack/iotronic.git

CONTRIBUTING.rst (new file, 17 lines)
@@ -0,0 +1,17 @@
If you would like to contribute to the development of OpenStack, you must
follow the steps in this page:

   http://docs.openstack.org/infra/manual/developers.html

If you already have a good understanding of how the system works and your
OpenStack accounts are set up, you can skip to the development workflow
section of this documentation to learn how changes to OpenStack should be
submitted for review via the Gerrit tool:

   http://docs.openstack.org/infra/manual/developers.html#development-workflow

Pull requests submitted through GitHub will be ignored.

Bugs should be filed on Launchpad, not GitHub:

   https://bugs.launchpad.net/iotronic

HACKING.rst (new file, 4 lines)
@@ -0,0 +1,4 @@
iotronic Style Commandments
===============================================

Read the OpenStack Style Commandments http://docs.openstack.org/developer/hacking/

LICENSE (30 lines changed)
@@ -1,4 +1,5 @@
Apache License

Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
@@ -173,30 +174,3 @@ Apache License
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright {yyyy} {name of copyright owner}

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

MANIFEST.in (new file, 6 lines)
@@ -0,0 +1,6 @@
include AUTHORS
include ChangeLog
exclude .gitignore
exclude .gitreview

global-exclude *.pyc

README.rst (new file, 19 lines)
@@ -0,0 +1,19 @@
===============================
iotronic
===============================

IoTronic is an Internet of Things resource management service for OpenStack clouds.

Please fill in here a long description which must be at least 3 lines wrapped on
80 cols, so that distribution package maintainers can use it in their packages.
Note that this is a hard requirement.

* Free software: Apache license
* Documentation: http://docs.openstack.org/developer/iotronic
* Source: http://git.openstack.org/cgit/openstack/iotronic
* Bugs: http://bugs.launchpad.net/iotronic

Features
--------

* TODO

doc/source/conf.py (new executable file, 75 lines)
@@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys

sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ----------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinx.ext.autodoc',
    #'sphinx.ext.intersphinx',
    'oslosphinx'
]

# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'iotronic'
copyright = u'2013, OpenStack Foundation'

# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output --------------------------------------------------

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# html_static_path = ['static']

# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    ('index',
     '%s.tex' % project,
     u'%s Documentation' % project,
     u'OpenStack Foundation', 'manual'),
]

# Example configuration for intersphinx: refer to the Python standard library.
#intersphinx_mapping = {'http://docs.python.org/': None}

doc/source/contributing.rst (new file, 4 lines)
@@ -0,0 +1,4 @@
============
Contributing
============
.. include:: ../../CONTRIBUTING.rst

doc/source/index.rst (new file, 25 lines)
@@ -0,0 +1,25 @@
.. iotronic documentation master file, created by
   sphinx-quickstart on Tue Jul 9 22:26:36 2013.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to iotronic's documentation!
========================================================

Contents:

.. toctree::
   :maxdepth: 2

   readme
   installation
   usage
   contributing

Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

doc/source/installation.rst (new file, 12 lines)
@@ -0,0 +1,12 @@
============
Installation
============

At the command line::

    $ pip install iotronic

Or, if you have virtualenvwrapper installed::

    $ mkvirtualenv iotronic
    $ pip install iotronic

doc/source/readme.rst (new file, 1 line)
@@ -0,0 +1 @@
.. include:: ../../README.rst

doc/source/usage.rst (new file, 7 lines)
@@ -0,0 +1,7 @@
========
Usage
========

To use iotronic in a project::

    import iotronic

@@ -13,10 +13,9 @@
# License for the specific language governing permissions and limitations
# under the License.

import eventlet
import os

os.environ['EVENTLET_NO_GREENDNS'] = 'yes'

import eventlet

eventlet.monkey_patch(os=False)
eventlet.monkey_patch(os=False)

@@ -27,7 +27,7 @@ API_SERVICE_OPTS = [
               default=1000,
               help='The maximum number of items returned in a single '
                    'response from a collection resource.'),
    ]
]

CONF = cfg.CONF

@@ -35,4 +35,4 @@ CONF = cfg.CONF
opt_group = cfg.OptGroup(name='api',
                         title='Options for the iotronic-api service')
CONF.register_group(opt_group)
CONF.register_opts(API_SERVICE_OPTS, opt_group)
CONF.register_opts(API_SERVICE_OPTS, opt_group)

@@ -25,16 +25,19 @@ from iotronic.api import middleware

api_opts = [
    cfg.StrOpt('auth_strategy',
    cfg.StrOpt(
        'auth_strategy',
        default='keystone',
        help='Authentication strategy used by iotronic-api: one of "keystone" '
             'or "noauth". "noauth" should not be used in a production '
             'environment because all authentication will be disabled.'),
    cfg.BoolOpt('pecan_debug',
                default=False,
                help=('Enable pecan debug mode. WARNING: this is insecure '
                      'and should not be used in a production environment.')),
]
             'or "noauth". "noauth" should not be used in a production '
             'environment because all authentication will be disabled.'),
    cfg.BoolOpt(
        'pecan_debug',
        default=False,
        help=(
            'Enable pecan debug mode. WARNING: this is insecure '
            'and should not be used in a production environment.')),
]

CONF = cfg.CONF
CONF.register_opts(api_opts)

@@ -79,6 +82,7 @@ def setup_app(pecan_config=None, extra_hooks=None):

class VersionSelectorApplication(object):

    def __init__(self):
        pc = get_pecan_config()
        pc.app.enable_acl = (CONF.auth_strategy == 'keystone')

@@ -30,8 +30,8 @@ app = {
    'acl_public_routes': [
        '/',
        '/v1',
        #'/v1/drivers/[a-z_]*/vendor_passthru/lookup',
        #'/v1/nodes/[a-z0-9\-]+/vendor_passthru/heartbeat',
        # '/v1/drivers/[a-z_]*/vendor_passthru/lookup',
        # '/v1/nodes/[a-z0-9\-]+/vendor_passthru/heartbeat',
        '/v1/nodes/[a-z0-9\-]',
    ],
}

@@ -70,8 +70,8 @@ class Version(object):
        :param latest_version: version to use if latest is requested
        :raises: webob.HTTPNotAcceptable
        """
        (self.major, self.minor) = Version.parse_headers(headers,
            default_version, latest_version)
        (self.major, self.minor) = Version.parse_headers(
            headers, default_version, latest_version)

    def __repr__(self):
        return '%s.%s' % (self.major, self.minor)

@@ -16,24 +16,23 @@
|
||||
Version 1 of the Iotronic API
|
||||
"""
|
||||
|
||||
from iotronic.api.controllers import base
|
||||
from iotronic.api.controllers import link
|
||||
from iotronic.api.controllers.v1 import node
|
||||
from iotronic.api import expose
|
||||
from iotronic.common.i18n import _
|
||||
import pecan
|
||||
from pecan import rest
|
||||
from webob import exc
|
||||
from wsme import types as wtypes
|
||||
from iotronic.api.controllers import link
|
||||
from iotronic.api.controllers.v1 import node
|
||||
|
||||
'''
|
||||
#from iotronic.api.controllers.v1 import chassis
|
||||
#from iotronic.api.controllers.v1 import driver
|
||||
# from iotronic.api.controllers.v1 import chassis
|
||||
# from iotronic.api.controllers.v1 import driver
|
||||
|
||||
#from iotronic.api.controllers.v1 import port
|
||||
# from iotronic.api.controllers.v1 import port
|
||||
'''
|
||||
|
||||
from iotronic.api.controllers import base
|
||||
from iotronic.api import expose
|
||||
from iotronic.common.i18n import _
|
||||
|
||||
BASE_VERSION = 1
|
||||
|
||||
MIN_VER_STR = '1.0'
|
||||
@@ -56,7 +55,7 @@ class MediaType(base.APIBase):
|
||||
def __init__(self, base, type):
|
||||
self.base = base
|
||||
self.type = type
|
||||
'''
|
||||
'''
|
||||
|
||||
|
||||
class V1(base.APIBase):
|
||||
@@ -65,29 +64,29 @@ class V1(base.APIBase):
|
||||
id = wtypes.text
|
||||
"""The ID of the version, also acts as the release number"""
|
||||
|
||||
#media_types = [MediaType]
|
||||
# media_types = [MediaType]
|
||||
"""An array of supported media types for this version"""
|
||||
|
||||
#links = [link.Link]
|
||||
# links = [link.Link]
|
||||
"""Links that point to a specific URL for this version and documentation"""
|
||||
|
||||
#chassis = [link.Link]
|
||||
# chassis = [link.Link]
|
||||
"""Links to the chassis resource"""
|
||||
|
||||
nodes = [link.Link]
|
||||
"""Links to the nodes resource"""
|
||||
|
||||
#ports = [link.Link]
|
||||
|
||||
# ports = [link.Link]
|
||||
"""Links to the ports resource"""
|
||||
|
||||
#drivers = [link.Link]
|
||||
# drivers = [link.Link]
|
||||
"""Links to the drivers resource"""
|
||||
|
||||
@staticmethod
|
||||
def convert():
|
||||
v1 = V1()
|
||||
v1.id = "v1"
|
||||
|
||||
|
||||
v1.nodes = [link.Link.make_link('self', pecan.request.host_url,
|
||||
'nodes', ''),
|
||||
link.Link.make_link('bookmark',
|
||||
@@ -95,7 +94,7 @@ class V1(base.APIBase):
|
||||
'nodes', '',
|
||||
bookmark=True)
|
||||
]
|
||||
|
||||
|
||||
'''
|
||||
v1.links = [link.Link.make_link('self', pecan.request.host_url,
|
||||
'v1', '', bookmark=True),
|
||||
@@ -105,10 +104,10 @@ class V1(base.APIBase):
|
||||
'api-spec-v1.html',
|
||||
bookmark=True, type='text/html')
|
||||
]
|
||||
|
||||
|
||||
v1.media_types = [MediaType('application/json',
|
||||
'application/vnd.openstack.iotronic.v1+json')]
|
||||
|
||||
|
||||
v1.chassis = [link.Link.make_link('self', pecan.request.host_url,
|
||||
'chassis', ''),
|
||||
link.Link.make_link('bookmark',
|
||||
@@ -138,11 +137,11 @@ class V1(base.APIBase):
|
||||
|
||||
class Controller(rest.RestController):
|
||||
"""Version 1 API controller root."""
|
||||
|
||||
|
||||
nodes = node.NodesController()
|
||||
#ports = port.PortsController()
|
||||
#chassis = chassis.ChassisController()
|
||||
#drivers = driver.DriversController()
|
||||
# ports = port.PortsController()
|
||||
# chassis = chassis.ChassisController()
|
||||
# drivers = driver.DriversController()
|
||||
|
||||
@expose.expose(V1)
|
||||
def get(self):
|
||||
@@ -159,8 +158,10 @@ class Controller(rest.RestController):
|
||||
raise exc.HTTPNotAcceptable(_(
|
||||
"Mutually exclusive versions requested. Version %(ver)s "
|
||||
"requested but not supported by this service. The supported "
|
||||
"version range is: [%(min)s, %(max)s].") % {'ver': version,
|
||||
'min': MIN_VER_STR, 'max': MAX_VER_STR}, headers=headers)
|
||||
"version range is: [%(min)s,%(max)s]."
|
||||
) % {'ver': version, 'min': MIN_VER_STR,
|
||||
'max': MAX_VER_STR},
|
||||
headers=headers)
|
||||
# ensure the minor version is within the supported range
|
||||
if version < MIN_VER or version > MAX_VER:
|
||||
raise exc.HTTPNotAcceptable(_(
|
||||
|
@@ -1,207 +0,0 @@
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Version 1 of the Iotronic API
|
||||
"""
|
||||
|
||||
import pecan
|
||||
from pecan import rest
|
||||
from webob import exc
|
||||
from wsme import types as wtypes
|
||||
|
||||
|
||||
from iotronic.api.controllers import base
|
||||
from iotronic.api.controllers import link
|
||||
#from iotronic.api.controllers.v1 import chassis
|
||||
#from iotronic.api.controllers.v1 import driver
|
||||
from iotronic.api.controllers.v1 import node
|
||||
from iotronic.api.controllers.v1 import board
|
||||
#from iotronic.api.controllers.v1 import port
|
||||
from iotronic.api import expose
|
||||
from iotronic.common.i18n import _
|
||||
|
||||
|
||||
|
||||
BASE_VERSION = 1
|
||||
|
||||
# NOTE(deva): v1.0 is reserved to indicate Juno's API, but is not presently
|
||||
# supported by the API service. All changes between Juno and the
|
||||
# point where we added microversioning are considered backwards-
|
||||
# compatible, but are not specifically discoverable at this time.
|
||||
#
|
||||
# The v1.1 version indicates this "initial" version as being
|
||||
# different from Juno (v1.0), and includes the following changes:
|
||||
#
|
||||
# 827db7fe: Add Node.maintenance_reason
|
||||
# 68eed82b: Add API endpoint to set/unset the node maintenance mode
|
||||
# bc973889: Add sync and async support for passthru methods
|
||||
# e03f443b: Vendor endpoints to support different HTTP methods
|
||||
# e69e5309: Make vendor methods discoverable via the Iotronic API
|
||||
# edf532db: Add logic to store the config drive passed by Nova
|
||||
|
||||
# v1.1: API at the point in time when microversioning support was added
|
||||
MIN_VER_STR = '1.0'
|
||||
|
||||
# v1.2: Renamed NOSTATE ("None") to AVAILABLE ("available")
|
||||
# v1.3: Add node.driver_internal_info
|
||||
# v1.4: Add MANAGEABLE state
|
||||
# v1.5: Add logical node names
|
||||
# v1.6: Add INSPECT* states
|
||||
MAX_VER_STR = '1.0'
|
||||
|
||||
|
||||
MIN_VER = base.Version({base.Version.string: MIN_VER_STR},
|
||||
MIN_VER_STR, MAX_VER_STR)
|
||||
MAX_VER = base.Version({base.Version.string: MAX_VER_STR},
|
||||
MIN_VER_STR, MAX_VER_STR)
|
||||
|
||||
|
||||
class MediaType(base.APIBase):
|
||||
"""A media type representation."""
|
||||
|
||||
base = wtypes.text
|
||||
type = wtypes.text
|
||||
|
||||
def __init__(self, base, type):
|
||||
self.base = base
|
||||
self.type = type
|
||||
|
||||
|
||||
class V1(base.APIBase):
|
||||
"""The representation of the version 1 of the API."""
|
||||
|
||||
id = wtypes.text
|
||||
"""The ID of the version, also acts as the release number"""
|
||||
|
||||
media_types = [MediaType]
|
||||
"""An array of supported media types for this version"""
|
||||
|
||||
links = [link.Link]
|
||||
"""Links that point to a specific URL for this version and documentation"""
|
||||
|
||||
#chassis = [link.Link]
|
||||
"""Links to the chassis resource"""
|
||||
|
||||
nodes = [link.Link]
|
||||
"""Links to the nodes resource"""
|
||||
|
||||
#ports = [link.Link]
|
||||
"""Links to the ports resource"""
|
||||
|
||||
#drivers = [link.Link]
|
||||
"""Links to the drivers resource"""
|
||||
|
||||
@staticmethod
|
||||
def convert():
|
||||
v1 = V1()
|
||||
v1.id = "v1"
|
||||
|
||||
v1.links = [link.Link.make_link('self', pecan.request.host_url,
|
||||
'v1', '', bookmark=True),
|
||||
link.Link.make_link('describedby',
|
||||
'http://docs.openstack.org',
|
||||
'developer/iotronic/dev',
|
||||
'api-spec-v1.html',
|
||||
bookmark=True, type='text/html')
|
||||
]
|
||||
|
||||
v1.media_types = [MediaType('application/json',
|
||||
'application/vnd.openstack.iotronic.v1+json')]
|
||||
'''
|
||||
v1.chassis = [link.Link.make_link('self', pecan.request.host_url,
|
||||
'chassis', ''),
|
||||
link.Link.make_link('bookmark',
|
||||
pecan.request.host_url,
|
||||
'chassis', '',
|
||||
bookmark=True)
|
||||
]
|
||||
'''
|
||||
v1.nodes = [link.Link.make_link('self', pecan.request.host_url,
|
||||
'nodes', ''),
|
||||
link.Link.make_link('bookmark',
|
||||
pecan.request.host_url,
|
||||
'nodes', '',
|
||||
bookmark=True)
|
||||
]
|
||||
'''
|
||||
v1.ports = [link.Link.make_link('self', pecan.request.host_url,
|
||||
'ports', ''),
|
||||
link.Link.make_link('bookmark',
|
||||
pecan.request.host_url,
|
||||
'ports', '',
|
||||
bookmark=True)
|
||||
]
|
||||
v1.drivers = [link.Link.make_link('self', pecan.request.host_url,
|
||||
'drivers', ''),
|
||||
link.Link.make_link('bookmark',
|
||||
pecan.request.host_url,
|
||||
'drivers', '',
|
||||
bookmark=True)
|
||||
]
|
||||
'''
|
||||
return v1
|
||||
|
||||
|
||||
class Controller(rest.RestController):
|
||||
"""Version 1 API controller root."""
|
||||
|
||||
nodes = node.NodesController()
|
||||
#ports = port.PortsController()
|
||||
#chassis = chassis.ChassisController()
|
||||
#drivers = driver.DriversController()
|
||||
boards= board.BoardsController()
|
||||
|
||||
@expose.expose(V1)
|
||||
def get(self):
|
||||
# NOTE: The reason why convert() it's being called for every
|
||||
# request is because we need to get the host url from
|
||||
# the request object to make the links.
|
||||
return V1.convert()
|
||||
|
||||
def _check_version(self, version, headers=None):
|
||||
if headers is None:
|
||||
headers = {}
|
||||
# ensure that major version in the URL matches the header
|
||||
if version.major != BASE_VERSION:
|
||||
raise exc.HTTPNotAcceptable(_(
|
||||
"Mutually exclusive versions requested. Version %(ver)s "
|
||||
"requested but not supported by this service. The supported "
|
||||
"version range is: [%(min)s, %(max)s].") % {'ver': version,
|
||||
'min': MIN_VER_STR, 'max': MAX_VER_STR}, headers=headers)
|
||||
# ensure the minor version is within the supported range
|
||||
if version < MIN_VER or version > MAX_VER:
|
||||
raise exc.HTTPNotAcceptable(_(
|
||||
"Version %(ver)s was requested but the minor version is not "
|
||||
"supported by this service. The supported version range is: "
|
||||
"[%(min)s, %(max)s].") % {'ver': version, 'min': MIN_VER_STR,
|
||||
'max': MAX_VER_STR}, headers=headers)
|
||||
|
||||
@pecan.expose()
|
||||
def _route(self, args):
|
||||
v = base.Version(pecan.request.headers, MIN_VER_STR, MAX_VER_STR)
|
||||
|
||||
# Always set the min and max headers
|
||||
pecan.response.headers[base.Version.min_string] = MIN_VER_STR
|
||||
pecan.response.headers[base.Version.max_string] = MAX_VER_STR
|
||||
|
||||
# assert that requested version is supported
|
||||
self._check_version(v, pecan.response.headers)
|
||||
pecan.response.headers[base.Version.string] = str(v)
|
||||
pecan.request.version = v
|
||||
|
||||
return super(Controller, self)._route(args)
|
||||
|
||||
|
||||
__all__ = (Controller)
|
@@ -1,270 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import datetime
|
||||
|
||||
import pecan
|
||||
from pecan import rest
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
|
||||
from iotronic.api.controllers import base
|
||||
from iotronic.api.controllers import link
|
||||
from iotronic.api.controllers.v1 import collection
|
||||
from iotronic.api.controllers.v1 import node
|
||||
from iotronic.api.controllers.v1 import types
|
||||
from iotronic.api.controllers.v1 import utils as api_utils
|
||||
from iotronic.api import expose
|
||||
from iotronic.common import exception
|
||||
from iotronic.common.i18n import _
|
||||
from iotronic import objects
|
||||
|
||||
|
||||
class ChassisPatchType(types.JsonPatchType):
|
||||
pass
|
||||
|
||||
|
||||
class Chassis(base.APIBase):
|
||||
"""API representation of a chassis.
|
||||
|
||||
This class enforces type checking and value constraints, and converts
|
||||
between the internal object model and the API representation of
|
||||
a chassis.
|
||||
"""
|
||||
|
||||
uuid = types.uuid
|
||||
"""The UUID of the chassis"""
|
||||
|
||||
description = wtypes.text
|
||||
"""The description of the chassis"""
|
||||
|
||||
extra = {wtypes.text: types.jsontype}
|
||||
"""The metadata of the chassis"""
|
||||
|
||||
links = wsme.wsattr([link.Link], readonly=True)
|
||||
"""A list containing a self link and associated chassis links"""
|
||||
|
||||
nodes = wsme.wsattr([link.Link], readonly=True)
|
||||
"""Links to the collection of nodes contained in this chassis"""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
self.fields = []
|
||||
for field in objects.Chassis.fields:
|
||||
# Skip fields we do not expose.
|
||||
if not hasattr(self, field):
|
||||
continue
|
||||
self.fields.append(field)
|
||||
setattr(self, field, kwargs.get(field, wtypes.Unset))
|
||||
|
||||
@staticmethod
|
||||
def _convert_with_links(chassis, url, expand=True):
|
||||
if not expand:
|
||||
chassis.unset_fields_except(['uuid', 'description'])
|
||||
else:
|
||||
chassis.nodes = [link.Link.make_link('self',
|
||||
url,
|
||||
'chassis',
|
||||
chassis.uuid + "/nodes"),
|
||||
link.Link.make_link('bookmark',
|
||||
url,
|
||||
'chassis',
|
||||
chassis.uuid + "/nodes",
|
||||
bookmark=True)
|
||||
]
|
||||
chassis.links = [link.Link.make_link('self',
|
||||
url,
|
||||
'chassis', chassis.uuid),
|
||||
link.Link.make_link('bookmark',
|
||||
url,
|
||||
'chassis', chassis.uuid,
|
||||
bookmark=True)
|
||||
]
|
||||
return chassis
|
||||
|
||||
@classmethod
|
||||
def convert_with_links(cls, rpc_chassis, expand=True):
|
||||
chassis = Chassis(**rpc_chassis.as_dict())
|
||||
return cls._convert_with_links(chassis, pecan.request.host_url,
|
||||
expand)
|
||||
|
||||
@classmethod
|
||||
def sample(cls, expand=True):
|
||||
time = datetime.datetime(2000, 1, 1, 12, 0, 0)
|
||||
sample = cls(uuid='eaaca217-e7d8-47b4-bb41-3f99f20eed89', extra={},
|
||||
description='Sample chassis', created_at=time,
|
||||
updated_at=time)
|
||||
return cls._convert_with_links(sample, 'http://localhost:6385',
|
||||
expand)
|
||||
|
||||
|
||||
class ChassisCollection(collection.Collection):
|
||||
"""API representation of a collection of chassis."""
|
||||
|
||||
chassis = [Chassis]
|
||||
"""A list containing chassis objects"""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
self._type = 'chassis'
|
||||
|
||||
@staticmethod
|
||||
def convert_with_links(chassis, limit, url=None, expand=False, **kwargs):
|
||||
collection = ChassisCollection()
|
||||
collection.chassis = [Chassis.convert_with_links(ch, expand)
|
||||
for ch in chassis]
|
||||
url = url or None
|
||||
collection.next = collection.get_next(limit, url=url, **kwargs)
|
||||
return collection
|
||||
|
||||
@classmethod
|
||||
def sample(cls, expand=True):
|
||||
sample = cls()
|
||||
sample.chassis = [Chassis.sample(expand=False)]
|
||||
return sample
|
||||
|
||||
|
||||
class ChassisController(rest.RestController):
|
||||
"""REST controller for Chassis."""
|
||||
|
||||
nodes = node.NodesController()
|
||||
"""Expose nodes as a sub-element of chassis"""
|
||||
|
||||
# Set the flag to indicate that the requests to this resource are
|
||||
# coming from a top-level resource
|
||||
nodes.from_chassis = True
|
||||
|
||||
_custom_actions = {
|
||||
'detail': ['GET'],
|
||||
}
|
||||
|
||||
invalid_sort_key_list = ['extra']
|
||||
|
||||
def _get_chassis_collection(self, marker, limit, sort_key, sort_dir,
|
||||
expand=False, resource_url=None):
|
||||
limit = api_utils.validate_limit(limit)
|
||||
sort_dir = api_utils.validate_sort_dir(sort_dir)
|
||||
marker_obj = None
|
||||
if marker:
|
||||
marker_obj = objects.Chassis.get_by_uuid(pecan.request.context,
|
||||
marker)
|
||||
|
||||
if sort_key in self.invalid_sort_key_list:
|
||||
raise exception.InvalidParameterValue(_(
|
||||
"The sort_key value %(key)s is an invalid field for sorting")
|
||||
% {'key': sort_key})
|
||||
|
||||
chassis = objects.Chassis.list(pecan.request.context, limit,
|
||||
marker_obj, sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
return ChassisCollection.convert_with_links(chassis, limit,
|
||||
url=resource_url,
|
||||
expand=expand,
|
||||
sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
|
||||
@expose.expose(ChassisCollection, types.uuid,
|
||||
int, wtypes.text, wtypes.text)
|
||||
def get_all(self, marker=None, limit=None, sort_key='id', sort_dir='asc'):
|
||||
"""Retrieve a list of chassis.
|
||||
|
||||
:param marker: pagination marker for large data sets.
|
||||
:param limit: maximum number of resources to return in a single result.
|
||||
:param sort_key: column to sort results by. Default: id.
|
||||
:param sort_dir: direction to sort. "asc" or "desc". Default: asc.
|
||||
"""
|
||||
return self._get_chassis_collection(marker, limit, sort_key, sort_dir)
|
||||
|
||||
@expose.expose(ChassisCollection, types.uuid, int,
|
||||
wtypes.text, wtypes.text)
|
||||
def detail(self, marker=None, limit=None, sort_key='id', sort_dir='asc'):
|
||||
"""Retrieve a list of chassis with detail.
|
||||
|
||||
:param marker: pagination marker for large data sets.
|
||||
:param limit: maximum number of resources to return in a single result.
|
||||
:param sort_key: column to sort results by. Default: id.
|
||||
:param sort_dir: direction to sort. "asc" or "desc". Default: asc.
|
||||
"""
|
||||
# /detail should only work against collections
|
||||
parent = pecan.request.path.split('/')[:-1][-1]
|
||||
if parent != "chassis":
|
||||
raise exception.HTTPNotFound
|
||||
|
||||
expand = True
|
||||
resource_url = '/'.join(['chassis', 'detail'])
|
||||
return self._get_chassis_collection(marker, limit, sort_key, sort_dir,
|
||||
expand, resource_url)
|
||||
|
||||
@expose.expose(Chassis, types.uuid)
|
||||
def get_one(self, chassis_uuid):
|
||||
"""Retrieve information about the given chassis.
|
||||
|
||||
:param chassis_uuid: UUID of a chassis.
|
||||
"""
|
||||
rpc_chassis = objects.Chassis.get_by_uuid(pecan.request.context,
|
||||
chassis_uuid)
|
||||
return Chassis.convert_with_links(rpc_chassis)
|
||||
|
||||
@expose.expose(Chassis, body=Chassis, status_code=201)
|
||||
def post(self, chassis):
|
||||
"""Create a new chassis.
|
||||
|
||||
:param chassis: a chassis within the request body.
|
||||
"""
|
||||
new_chassis = objects.Chassis(pecan.request.context,
|
||||
**chassis.as_dict())
|
||||
new_chassis.create()
|
||||
# Set the HTTP Location Header
|
||||
pecan.response.location = link.build_url('chassis', new_chassis.uuid)
|
||||
return Chassis.convert_with_links(new_chassis)
|
||||
|
||||
@wsme.validate(types.uuid, [ChassisPatchType])
|
||||
@expose.expose(Chassis, types.uuid, body=[ChassisPatchType])
|
||||
def patch(self, chassis_uuid, patch):
|
||||
"""Update an existing chassis.
|
||||
|
||||
:param chassis_uuid: UUID of a chassis.
|
||||
:param patch: a json PATCH document to apply to this chassis.
|
||||
"""
|
||||
rpc_chassis = objects.Chassis.get_by_uuid(pecan.request.context,
|
||||
chassis_uuid)
|
||||
try:
|
||||
chassis = Chassis(**api_utils.apply_jsonpatch(
|
||||
rpc_chassis.as_dict(), patch))
|
||||
except api_utils.JSONPATCH_EXCEPTIONS as e:
|
||||
raise exception.PatchError(patch=patch, reason=e)
|
||||
|
||||
# Update only the fields that have changed
|
||||
for field in objects.Chassis.fields:
|
||||
try:
|
||||
patch_val = getattr(chassis, field)
|
||||
except AttributeError:
|
||||
# Ignore fields that aren't exposed in the API
|
||||
continue
|
||||
if patch_val == wtypes.Unset:
|
||||
patch_val = None
|
||||
if rpc_chassis[field] != patch_val:
|
||||
rpc_chassis[field] = patch_val
|
||||
|
||||
rpc_chassis.save()
|
||||
return Chassis.convert_with_links(rpc_chassis)
|
||||
|
||||
@expose.expose(None, types.uuid, status_code=204)
|
||||
def delete(self, chassis_uuid):
|
||||
"""Delete a chassis.
|
||||
|
||||
:param chassis_uuid: UUID of a chassis.
|
||||
"""
|
||||
rpc_chassis = objects.Chassis.get_by_uuid(pecan.request.context,
|
||||
chassis_uuid)
|
||||
rpc_chassis.destroy()
|
@@ -1,48 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import pecan
|
||||
from wsme import types as wtypes
|
||||
|
||||
from iotronic.api.controllers import base
|
||||
from iotronic.api.controllers import link
|
||||
|
||||
|
||||
class Collection(base.APIBase):
|
||||
|
||||
next = wtypes.text
|
||||
"""A link to retrieve the next subset of the collection"""
|
||||
|
||||
@property
|
||||
def collection(self):
|
||||
return getattr(self, self._type)
|
||||
|
||||
def has_next(self, limit):
|
||||
"""Return whether collection has more items."""
|
||||
return len(self.collection) and len(self.collection) == limit
|
||||
|
||||
def get_next(self, limit, url=None, **kwargs):
|
||||
"""Return a link to the next subset of the collection."""
|
||||
if not self.has_next(limit):
|
||||
return wtypes.Unset
|
||||
|
||||
resource_url = url or self._type
|
||||
q_args = ''.join(['%s=%s&' % (key, kwargs[key]) for key in kwargs])
|
||||
next_args = '?%(args)slimit=%(limit)d&marker=%(marker)s' % {
|
||||
'args': q_args, 'limit': limit,
|
||||
'marker': self.collection[-1].uuid}
|
||||
|
||||
return link.Link.make_link('next', pecan.request.host_url,
|
||||
resource_url, next_args).href
|
@@ -1,210 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import pecan
|
||||
from pecan import rest
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
|
||||
from iotronic.api.controllers import base
|
||||
from iotronic.api.controllers import link
|
||||
from iotronic.api import expose
|
||||
from iotronic.common import exception
|
||||
from iotronic.common.i18n import _
|
||||
|
||||
|
||||
# Property information for drivers:
|
||||
# key = driver name;
|
||||
# value = dictionary of properties of that driver:
|
||||
# key = property name.
|
||||
# value = description of the property.
|
||||
# NOTE(rloo). This is cached for the lifetime of the API service. If one or
|
||||
# more conductor services are restarted with new driver versions, the API
|
||||
# service should be restarted.
|
||||
_DRIVER_PROPERTIES = {}
|
||||
|
||||
# Vendor information for drivers:
|
||||
# key = driver name;
|
||||
# value = dictionary of vendor methods of that driver:
|
||||
# key = method name.
|
||||
# value = dictionary with the metadata of that method.
|
||||
# NOTE(lucasagomes). This is cached for the lifetime of the API
|
||||
# service. If one or more conductor services are restarted with new driver
|
||||
# versions, the API service should be restarted.
|
||||
_VENDOR_METHODS = {}
|
||||
|
||||
|
||||
class Driver(base.APIBase):
|
||||
"""API representation of a driver."""
|
||||
|
||||
name = wtypes.text
|
||||
"""The name of the driver"""
|
||||
|
||||
hosts = [wtypes.text]
|
||||
"""A list of active conductors that support this driver"""
|
||||
|
||||
links = wsme.wsattr([link.Link], readonly=True)
|
||||
"""A list containing self and bookmark links"""
|
||||
|
||||
@staticmethod
|
||||
def convert_with_links(name, hosts):
|
||||
driver = Driver()
|
||||
driver.name = name
|
||||
driver.hosts = hosts
|
||||
driver.links = [
|
||||
link.Link.make_link('self',
|
||||
pecan.request.host_url,
|
||||
'drivers', name),
|
||||
link.Link.make_link('bookmark',
|
||||
pecan.request.host_url,
|
||||
'drivers', name,
|
||||
bookmark=True)
|
||||
]
|
||||
return driver
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
sample = cls(name="sample-driver",
|
||||
hosts=["fake-host"])
|
||||
return sample
|
||||
|
||||
|
||||
class DriverList(base.APIBase):
|
||||
"""API representation of a list of drivers."""
|
||||
|
||||
drivers = [Driver]
|
||||
"""A list containing drivers objects"""
|
||||
|
||||
@staticmethod
|
||||
def convert_with_links(drivers):
|
||||
collection = DriverList()
|
||||
collection.drivers = [
|
||||
Driver.convert_with_links(dname, list(drivers[dname]))
|
||||
for dname in drivers]
|
||||
return collection
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
sample = cls()
|
||||
sample.drivers = [Driver.sample()]
|
||||
return sample
|
||||
|
||||
|
||||
class DriverPassthruController(rest.RestController):
|
||||
"""REST controller for driver passthru.
|
||||
|
||||
This controller allow vendors to expose cross-node functionality in the
|
||||
Iotronic API. Iotronic will merely relay the message from here to the specified
|
||||
driver, no introspection will be made in the message body.
|
||||
"""
|
||||
|
||||
_custom_actions = {
|
||||
'methods': ['GET']
|
||||
}
|
||||
|
||||
@expose.expose(wtypes.text, wtypes.text)
|
||||
def methods(self, driver_name):
|
||||
"""Retrieve information about vendor methods of the given driver.
|
||||
|
||||
:param driver_name: name of the driver.
|
||||
:returns: dictionary with <vendor method name>:<method metadata>
|
||||
entries.
|
||||
:raises: DriverNotFound if the driver name is invalid or the
|
||||
driver cannot be loaded.
|
||||
"""
|
||||
if driver_name not in _VENDOR_METHODS:
|
||||
topic = pecan.request.rpcapi.get_topic_for_driver(driver_name)
|
||||
ret = pecan.request.rpcapi.get_driver_vendor_passthru_methods(
|
||||
pecan.request.context, driver_name, topic=topic)
|
||||
_VENDOR_METHODS[driver_name] = ret
|
||||
|
||||
return _VENDOR_METHODS[driver_name]
|
||||
|
||||
@expose.expose(wtypes.text, wtypes.text, wtypes.text,
|
||||
body=wtypes.text)
|
||||
def _default(self, driver_name, method, data=None):
|
||||
"""Call a driver API extension.
|
||||
|
||||
:param driver_name: name of the driver to call.
|
||||
:param method: name of the method, to be passed to the vendor
|
||||
implementation.
|
||||
:param data: body of data to supply to the specified method.
|
||||
"""
|
||||
if not method:
|
||||
raise wsme.exc.ClientSideError(_("Method not specified"))
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
http_method = pecan.request.method.upper()
|
||||
topic = pecan.request.rpcapi.get_topic_for_driver(driver_name)
|
||||
ret, is_async = pecan.request.rpcapi.driver_vendor_passthru(
|
||||
pecan.request.context, driver_name, method,
|
||||
http_method, data, topic=topic)
|
||||
status_code = 202 if is_async else 200
|
||||
return wsme.api.Response(ret, status_code=status_code)
|
||||
|
||||
|
||||
class DriversController(rest.RestController):
|
||||
"""REST controller for Drivers."""
|
||||
|
||||
vendor_passthru = DriverPassthruController()
|
||||
|
||||
_custom_actions = {
|
||||
'properties': ['GET'],
|
||||
}
|
||||
|
||||
@expose.expose(DriverList)
|
||||
def get_all(self):
|
||||
"""Retrieve a list of drivers."""
|
||||
# FIXME(deva): formatting of the auto-generated REST API docs
|
||||
# will break from a single-line doc string.
|
||||
# This is a result of a bug in sphinxcontrib-pecanwsme
|
||||
# https://github.com/dreamhost/sphinxcontrib-pecanwsme/issues/8
|
||||
driver_list = pecan.request.dbapi.get_active_driver_dict()
|
||||
return DriverList.convert_with_links(driver_list)
|
||||
|
||||
@expose.expose(Driver, wtypes.text)
|
||||
def get_one(self, driver_name):
|
||||
"""Retrieve a single driver."""
|
||||
# NOTE(russell_h): There is no way to make this more efficient than
|
||||
# retrieving a list of drivers using the current sqlalchemy schema, but
|
||||
# this path must be exposed for Pecan to route any paths we might
|
||||
# choose to expose below it.
|
||||
|
||||
driver_dict = pecan.request.dbapi.get_active_driver_dict()
|
||||
for name, hosts in driver_dict.items():
|
||||
if name == driver_name:
|
||||
return Driver.convert_with_links(name, list(hosts))
|
||||
|
||||
raise exception.DriverNotFound(driver_name=driver_name)
|
||||
|
||||
@expose.expose(wtypes.text, wtypes.text)
|
||||
def properties(self, driver_name):
|
||||
"""Retrieve property information of the given driver.
|
||||
|
||||
:param driver_name: name of the driver.
|
||||
:returns: dictionary with <property name>:<property description>
|
||||
entries.
|
||||
:raises: DriverNotFound (HTTP 404) if the driver name is invalid or
|
||||
the driver cannot be loaded.
|
||||
"""
|
||||
if driver_name not in _DRIVER_PROPERTIES:
|
||||
topic = pecan.request.rpcapi.get_topic_for_driver(driver_name)
|
||||
properties = pecan.request.rpcapi.get_driver_properties(
|
||||
pecan.request.context, driver_name, topic=topic)
|
||||
_DRIVER_PROPERTIES[driver_name] = properties
|
||||
|
||||
return _DRIVER_PROPERTIES[driver_name]
|
File diff suppressed because it is too large
@@ -1,396 +0,0 @@
|
||||
# Copyright 2013 UnitedStack Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import datetime
|
||||
|
||||
from oslo_utils import uuidutils
|
||||
import pecan
|
||||
from pecan import rest
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
|
||||
from iotronic.api.controllers import base
|
||||
from iotronic.api.controllers import link
|
||||
from iotronic.api.controllers.v1 import collection
|
||||
from iotronic.api.controllers.v1 import types
|
||||
from iotronic.api.controllers.v1 import utils as api_utils
|
||||
from iotronic.api import expose
|
||||
from iotronic.common import exception
|
||||
from iotronic.common.i18n import _
|
||||
from iotronic import objects
|
||||
|
||||
|
||||
class PortPatchType(types.JsonPatchType):
|
||||
|
||||
@staticmethod
|
||||
def mandatory_attrs():
|
||||
return ['/address', '/node_uuid']
|
||||
|
||||
|
||||
class Port(base.APIBase):
|
||||
"""API representation of a port.
|
||||
|
||||
This class enforces type checking and value constraints, and converts
|
||||
between the internal object model and the API representation of a port.
|
||||
"""
|
||||
|
||||
_node_uuid = None
|
||||
|
||||
def _get_node_uuid(self):
|
||||
return self._node_uuid
|
||||
|
||||
def _set_node_uuid(self, value):
|
||||
if value and self._node_uuid != value:
|
||||
try:
|
||||
# FIXME(comstud): One should only allow UUID here, but
|
||||
# there seems to be a bug in that tests are passing an
|
||||
# ID. See bug #1301046 for more details.
|
||||
node = objects.Node.get(pecan.request.context, value)
|
||||
self._node_uuid = node.uuid
|
||||
# NOTE(lucasagomes): Create the node_id attribute on-the-fly
|
||||
# to satisfy the api -> rpc object
|
||||
# conversion.
|
||||
self.node_id = node.id
|
||||
except exception.NodeNotFound as e:
|
||||
# Change error code because 404 (NotFound) is inappropriate
|
||||
# response for a POST request to create a Port
|
||||
e.code = 400 # BadRequest
|
||||
raise e
|
||||
elif value == wtypes.Unset:
|
||||
self._node_uuid = wtypes.Unset
|
||||
|
||||
uuid = types.uuid
|
||||
"""Unique UUID for this port"""
|
||||
|
||||
address = wsme.wsattr(types.macaddress, mandatory=True)
|
||||
"""MAC Address for this port"""
|
||||
|
||||
extra = {wtypes.text: types.jsontype}
|
||||
"""This port's meta data"""
|
||||
|
||||
node_uuid = wsme.wsproperty(types.uuid, _get_node_uuid, _set_node_uuid,
|
||||
mandatory=True)
|
||||
"""The UUID of the node this port belongs to"""
|
||||
|
||||
links = wsme.wsattr([link.Link], readonly=True)
|
||||
"""A list containing a self link and associated port links"""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
self.fields = []
|
||||
fields = list(objects.Port.fields)
|
||||
# NOTE(lucasagomes): node_uuid is not part of objects.Port.fields
|
||||
# because it's an API-only attribute
|
||||
fields.append('node_uuid')
|
||||
for field in fields:
|
||||
# Skip fields we do not expose.
|
||||
if not hasattr(self, field):
|
||||
continue
|
||||
self.fields.append(field)
|
||||
setattr(self, field, kwargs.get(field, wtypes.Unset))
|
||||
|
||||
# NOTE(lucasagomes): node_id is an attribute created on-the-fly
|
||||
# by _set_node_uuid(), it needs to be present in the fields so
|
||||
# that as_dict() will contain node_id field when converting it
|
||||
# before saving it in the database.
|
||||
self.fields.append('node_id')
|
||||
setattr(self, 'node_uuid', kwargs.get('node_id', wtypes.Unset))
|
||||
|
||||
@staticmethod
|
||||
def _convert_with_links(port, url, expand=True):
|
||||
if not expand:
|
||||
port.unset_fields_except(['uuid', 'address'])
|
||||
|
||||
# never expose the node_id attribute
|
||||
port.node_id = wtypes.Unset
|
||||
|
||||
port.links = [link.Link.make_link('self', url,
|
||||
'ports', port.uuid),
|
||||
link.Link.make_link('bookmark', url,
|
||||
'ports', port.uuid,
|
||||
bookmark=True)
|
||||
]
|
||||
return port
|
||||
|
||||
@classmethod
|
||||
def convert_with_links(cls, rpc_port, expand=True):
|
||||
port = Port(**rpc_port.as_dict())
|
||||
return cls._convert_with_links(port, pecan.request.host_url, expand)
|
||||
|
||||
@classmethod
|
||||
def sample(cls, expand=True):
|
||||
sample = cls(uuid='27e3153e-d5bf-4b7e-b517-fb518e17f34c',
|
||||
address='fe:54:00:77:07:d9',
|
||||
extra={'foo': 'bar'},
|
||||
created_at=datetime.datetime.utcnow(),
|
||||
updated_at=datetime.datetime.utcnow())
|
||||
# NOTE(lucasagomes): node_uuid getter() method look at the
|
||||
# _node_uuid variable
|
||||
sample._node_uuid = '7ae81bb3-dec3-4289-8d6c-da80bd8001ae'
|
||||
return cls._convert_with_links(sample, 'http://localhost:6385', expand)
|
||||
|
||||
|
||||
class PortCollection(collection.Collection):
|
||||
"""API representation of a collection of ports."""
|
||||
|
||||
ports = [Port]
|
||||
"""A list containing ports objects"""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
self._type = 'ports'
|
||||
|
||||
@staticmethod
|
||||
def convert_with_links(rpc_ports, limit, url=None, expand=False, **kwargs):
|
||||
collection = PortCollection()
|
||||
collection.ports = [Port.convert_with_links(p, expand)
|
||||
for p in rpc_ports]
|
||||
collection.next = collection.get_next(limit, url=url, **kwargs)
|
||||
return collection
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
sample = cls()
|
||||
sample.ports = [Port.sample(expand=False)]
|
||||
return sample
|
||||
|
||||
|
||||
class PortsController(rest.RestController):
|
||||
"""REST controller for Ports."""
|
||||
|
||||
from_nodes = False
|
||||
"""A flag to indicate if the requests to this controller are coming
|
||||
from the top-level resource Nodes."""
|
||||
|
||||
_custom_actions = {
|
||||
'detail': ['GET'],
|
||||
}
|
||||
|
||||
invalid_sort_key_list = ['extra']
|
||||
|
||||
def _get_ports_collection(self, node_ident, address, marker, limit,
|
||||
sort_key, sort_dir, expand=False,
|
||||
resource_url=None):
|
||||
if self.from_nodes and not node_ident:
|
||||
raise exception.MissingParameterValue(_(
|
||||
"Node identifier not specified."))
|
||||
|
||||
limit = api_utils.validate_limit(limit)
|
||||
sort_dir = api_utils.validate_sort_dir(sort_dir)
|
||||
|
||||
marker_obj = None
|
||||
if marker:
|
||||
marker_obj = objects.Port.get_by_uuid(pecan.request.context,
|
||||
marker)
|
||||
|
||||
if sort_key in self.invalid_sort_key_list:
|
||||
raise exception.InvalidParameterValue(_(
|
||||
"The sort_key value %(key)s is an invalid field for sorting"
|
||||
) % {'key': sort_key})
|
||||
|
||||
if node_ident:
|
||||
# FIXME(comstud): Since all we need is the node ID, we can
|
||||
# make this more efficient by only querying
|
||||
# for that column. This will get cleaned up
|
||||
# as we move to the object interface.
|
||||
node = api_utils.get_rpc_node(node_ident)
|
||||
ports = objects.Port.list_by_node_id(pecan.request.context,
|
||||
node.id, limit, marker_obj,
|
||||
sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
elif address:
|
||||
ports = self._get_ports_by_address(address)
|
||||
else:
|
||||
ports = objects.Port.list(pecan.request.context, limit,
|
||||
marker_obj, sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
|
||||
return PortCollection.convert_with_links(ports, limit,
|
||||
url=resource_url,
|
||||
expand=expand,
|
||||
sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
|
||||
def _get_ports_by_address(self, address):
|
||||
"""Retrieve a port by its address.
|
||||
|
||||
:param address: MAC address of a port, to get the port which has
|
||||
this MAC address.
|
||||
:returns: a list with the port, or an empty list if no port is found.
|
||||
|
||||
"""
|
||||
try:
|
||||
port = objects.Port.get_by_address(pecan.request.context, address)
|
||||
return [port]
|
||||
except exception.PortNotFound:
|
||||
return []
|
||||
|
||||
@expose.expose(PortCollection, types.uuid_or_name, types.uuid,
|
||||
types.macaddress, types.uuid, int, wtypes.text,
|
||||
wtypes.text)
|
||||
def get_all(self, node=None, node_uuid=None, address=None, marker=None,
|
||||
limit=None, sort_key='id', sort_dir='asc'):
|
||||
"""Retrieve a list of ports.
|
||||
|
||||
Note that the 'node_uuid' interface is deprecated in favour
|
||||
of the 'node' interface
|
||||
|
||||
:param node: UUID or name of a node, to get only ports for that
|
||||
node.
|
||||
:param node_uuid: UUID of a node, to get only ports for that
|
||||
node.
|
||||
:param address: MAC address of a port, to get the port which has
|
||||
this MAC address.
|
||||
:param marker: pagination marker for large data sets.
|
||||
:param limit: maximum number of resources to return in a single result.
|
||||
:param sort_key: column to sort results by. Default: id.
|
||||
:param sort_dir: direction to sort. "asc" or "desc". Default: asc.
|
||||
"""
|
||||
if not node_uuid and node:
|
||||
# We're invoking this interface using positional notation, or
|
||||
# explicitly using 'node'. Try and determine which one.
|
||||
# Make sure only one interface, node or node_uuid is used
|
||||
if (not api_utils.allow_node_logical_names() and
|
||||
not uuidutils.is_uuid_like(node)):
|
||||
raise exception.NotAcceptable()
|
||||
|
||||
return self._get_ports_collection(node_uuid or node, address, marker,
|
||||
limit, sort_key, sort_dir)
|
||||
|
||||
@expose.expose(PortCollection, types.uuid_or_name, types.uuid,
|
||||
types.macaddress, types.uuid, int, wtypes.text,
|
||||
wtypes.text)
|
||||
def detail(self, node=None, node_uuid=None, address=None, marker=None,
|
||||
limit=None, sort_key='id', sort_dir='asc'):
|
||||
"""Retrieve a list of ports with detail.
|
||||
|
||||
Note that the 'node_uuid' interface is deprecated in favour
|
||||
of the 'node' interface
|
||||
|
||||
:param node: UUID or name of a node, to get only ports for that
|
||||
node.
|
||||
:param node_uuid: UUID of a node, to get only ports for that
|
||||
node.
|
||||
:param address: MAC address of a port, to get the port which has
|
||||
this MAC address.
|
||||
:param marker: pagination marker for large data sets.
|
||||
:param limit: maximum number of resources to return in a single result.
|
||||
:param sort_key: column to sort results by. Default: id.
|
||||
:param sort_dir: direction to sort. "asc" or "desc". Default: asc.
|
||||
"""
|
||||
if not node_uuid and node:
|
||||
# We're invoking this interface using positional notation, or
|
||||
# explicitly using 'node'. Try and determine which one.
|
||||
# Make sure only one interface, node or node_uuid is used
|
||||
if (not api_utils.allow_node_logical_names() and
|
||||
not uuidutils.is_uuid_like(node)):
|
||||
raise exception.NotAcceptable()
|
||||
|
||||
# NOTE(lucasagomes): /detail should only work against collections
|
||||
parent = pecan.request.path.split('/')[:-1][-1]
|
||||
if parent != "ports":
|
||||
raise exception.HTTPNotFound
|
||||
|
||||
expand = True
|
||||
resource_url = '/'.join(['ports', 'detail'])
|
||||
return self._get_ports_collection(node_uuid or node, address, marker,
|
||||
limit, sort_key, sort_dir, expand,
|
||||
resource_url)
|
||||
|
||||
@expose.expose(Port, types.uuid)
|
||||
def get_one(self, port_uuid):
|
||||
"""Retrieve information about the given port.
|
||||
|
||||
:param port_uuid: UUID of a port.
|
||||
"""
|
||||
if self.from_nodes:
|
||||
raise exception.OperationNotPermitted
|
||||
|
||||
rpc_port = objects.Port.get_by_uuid(pecan.request.context, port_uuid)
|
||||
return Port.convert_with_links(rpc_port)
|
||||
|
||||
@expose.expose(Port, body=Port, status_code=201)
|
||||
def post(self, port):
|
||||
"""Create a new port.
|
||||
|
||||
:param port: a port within the request body.
|
||||
"""
|
||||
if self.from_nodes:
|
||||
raise exception.OperationNotPermitted
|
||||
|
||||
new_port = objects.Port(pecan.request.context,
|
||||
**port.as_dict())
|
||||
new_port.create()
|
||||
# Set the HTTP Location Header
|
||||
pecan.response.location = link.build_url('ports', new_port.uuid)
|
||||
return Port.convert_with_links(new_port)
|
||||
|
||||
@wsme.validate(types.uuid, [PortPatchType])
|
||||
@expose.expose(Port, types.uuid, body=[PortPatchType])
|
||||
def patch(self, port_uuid, patch):
|
||||
"""Update an existing port.
|
||||
|
||||
:param port_uuid: UUID of a port.
|
||||
:param patch: a json PATCH document to apply to this port.
|
||||
"""
|
||||
if self.from_nodes:
|
||||
raise exception.OperationNotPermitted
|
||||
|
||||
rpc_port = objects.Port.get_by_uuid(pecan.request.context, port_uuid)
|
||||
try:
|
||||
port_dict = rpc_port.as_dict()
|
||||
# NOTE(lucasagomes):
|
||||
# 1) Remove node_id because it's an internal value and
|
||||
# not present in the API object
|
||||
# 2) Add node_uuid
|
||||
port_dict['node_uuid'] = port_dict.pop('node_id', None)
|
||||
port = Port(**api_utils.apply_jsonpatch(port_dict, patch))
|
||||
except api_utils.JSONPATCH_EXCEPTIONS as e:
|
||||
raise exception.PatchError(patch=patch, reason=e)
|
||||
|
||||
# Update only the fields that have changed
|
||||
for field in objects.Port.fields:
|
||||
try:
|
||||
patch_val = getattr(port, field)
|
||||
except AttributeError:
|
||||
# Ignore fields that aren't exposed in the API
|
||||
continue
|
||||
if patch_val == wtypes.Unset:
|
||||
patch_val = None
|
||||
if rpc_port[field] != patch_val:
|
||||
rpc_port[field] = patch_val
|
||||
|
||||
rpc_node = objects.Node.get_by_id(pecan.request.context,
|
||||
rpc_port.node_id)
|
||||
topic = pecan.request.rpcapi.get_topic_for(rpc_node)
|
||||
|
||||
new_port = pecan.request.rpcapi.update_port(
|
||||
pecan.request.context, rpc_port, topic)
|
||||
|
||||
return Port.convert_with_links(new_port)
|
||||
|
||||
@expose.expose(None, types.uuid, status_code=204)
|
||||
def delete(self, port_uuid):
|
||||
"""Delete a port.
|
||||
|
||||
:param port_uuid: UUID of a port.
|
||||
"""
|
||||
if self.from_nodes:
|
||||
raise exception.OperationNotPermitted
|
||||
rpc_port = objects.Port.get_by_uuid(pecan.request.context,
|
||||
port_uuid)
|
||||
rpc_node = objects.Node.get_by_id(pecan.request.context,
|
||||
rpc_port.node_id)
|
||||
topic = pecan.request.rpcapi.get_topic_for(rpc_node)
|
||||
pecan.request.rpcapi.destroy_port(pecan.request.context,
|
||||
rpc_port, topic)
|
@ -1,34 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from wsme import types as wtypes
|
||||
|
||||
from iotronic.api.controllers import base
|
||||
from iotronic.api.controllers import link
|
||||
|
||||
|
||||
class State(base.APIBase):
|
||||
|
||||
current = wtypes.text
|
||||
"""The current state"""
|
||||
|
||||
target = wtypes.text
|
||||
"""The user modified desired state"""
|
||||
|
||||
available = [wtypes.text]
|
||||
"""A list of available states it is able to transition to"""
|
||||
|
||||
links = [link.Link]
|
||||
"""A list containing a self link and associated state links"""
|
@ -1,239 +0,0 @@
|
||||
# coding: utf-8
|
||||
#
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
|
||||
from oslo_utils import strutils
|
||||
from oslo_utils import uuidutils
|
||||
import six
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
|
||||
from iotronic.common import exception
|
||||
from iotronic.common.i18n import _
|
||||
from iotronic.common import utils
|
||||
|
||||
|
||||
class MacAddressType(wtypes.UserType):
|
||||
"""A simple MAC address type."""
|
||||
|
||||
basetype = wtypes.text
|
||||
name = 'macaddress'
|
||||
# FIXME(lucasagomes): When used with wsexpose decorator WSME will try
|
||||
# to get the name of the type by accessing its __name__ attribute.
|
||||
# Remove this __name__ attribute once it's fixed in WSME.
|
||||
# https://bugs.launchpad.net/wsme/+bug/1265590
|
||||
__name__ = name
|
||||
|
||||
@staticmethod
|
||||
def validate(value):
|
||||
return utils.validate_and_normalize_mac(value)
|
||||
|
||||
@staticmethod
|
||||
def frombasetype(value):
|
||||
if value is None:
|
||||
return None
|
||||
return MacAddressType.validate(value)
|
||||
|
||||
|
||||
class UuidOrNameType(wtypes.UserType):
|
||||
"""A simple UUID or logical name type."""
|
||||
|
||||
basetype = wtypes.text
|
||||
name = 'uuid_or_name'
|
||||
# FIXME(lucasagomes): When used with wsexpose decorator WSME will try
|
||||
# to get the name of the type by accessing its __name__ attribute.
|
||||
# Remove this __name__ attribute once it's fixed in WSME.
|
||||
# https://bugs.launchpad.net/wsme/+bug/1265590
|
||||
__name__ = name
|
||||
|
||||
@staticmethod
|
||||
def validate(value):
|
||||
if not (uuidutils.is_uuid_like(value)
|
||||
or utils.is_hostname_safe(value)):
|
||||
raise exception.InvalidUuidOrName(name=value)
|
||||
return value
|
||||
|
||||
@staticmethod
|
||||
def frombasetype(value):
|
||||
if value is None:
|
||||
return None
|
||||
return UuidOrNameType.validate(value)
|
||||
|
||||
|
||||
class NameType(wtypes.UserType):
|
||||
"""A simple logical name type."""
|
||||
|
||||
basetype = wtypes.text
|
||||
name = 'name'
|
||||
# FIXME(lucasagomes): When used with wsexpose decorator WSME will try
|
||||
# to get the name of the type by accessing its __name__ attribute.
|
||||
# Remove this __name__ attribute once it's fixed in WSME.
|
||||
# https://bugs.launchpad.net/wsme/+bug/1265590
|
||||
__name__ = name
|
||||
|
||||
@staticmethod
|
||||
def validate(value):
|
||||
if not utils.is_hostname_safe(value):
|
||||
raise exception.InvalidName(name=value)
|
||||
return value
|
||||
|
||||
@staticmethod
|
||||
def frombasetype(value):
|
||||
if value is None:
|
||||
return None
|
||||
return NameType.validate(value)
|
||||
|
||||
|
||||
class UuidType(wtypes.UserType):
|
||||
"""A simple UUID type."""
|
||||
|
||||
basetype = wtypes.text
|
||||
name = 'uuid'
|
||||
# FIXME(lucasagomes): When used with wsexpose decorator WSME will try
|
||||
# to get the name of the type by accessing its __name__ attribute.
|
||||
# Remove this __name__ attribute once it's fixed in WSME.
|
||||
# https://bugs.launchpad.net/wsme/+bug/1265590
|
||||
__name__ = name
|
||||
|
||||
@staticmethod
|
||||
def validate(value):
|
||||
if not uuidutils.is_uuid_like(value):
|
||||
raise exception.InvalidUUID(uuid=value)
|
||||
return value
|
||||
|
||||
@staticmethod
|
||||
def frombasetype(value):
|
||||
if value is None:
|
||||
return None
|
||||
return UuidType.validate(value)
|
||||
|
||||
|
||||
class BooleanType(wtypes.UserType):
|
||||
"""A simple boolean type."""
|
||||
|
||||
basetype = wtypes.text
|
||||
name = 'boolean'
|
||||
# FIXME(lucasagomes): When used with wsexpose decorator WSME will try
|
||||
# to get the name of the type by accessing its __name__ attribute.
|
||||
# Remove this __name__ attribute once it's fixed in WSME.
|
||||
# https://bugs.launchpad.net/wsme/+bug/1265590
|
||||
__name__ = name
|
||||
|
||||
@staticmethod
|
||||
def validate(value):
|
||||
try:
|
||||
return strutils.bool_from_string(value, strict=True)
|
||||
except ValueError as e:
|
||||
# raise Invalid to return 400 (BadRequest) in the API
|
||||
raise exception.Invalid(e)
|
||||
|
||||
@staticmethod
|
||||
def frombasetype(value):
|
||||
if value is None:
|
||||
return None
|
||||
return BooleanType.validate(value)
|
||||
|
||||
|
||||
class JsonType(wtypes.UserType):
|
||||
"""A simple JSON type."""
|
||||
|
||||
basetype = wtypes.text
|
||||
name = 'json'
|
||||
# FIXME(lucasagomes): When used with wsexpose decorator WSME will try
|
||||
# to get the name of the type by accessing its __name__ attribute.
|
||||
# Remove this __name__ attribute once it's fixed in WSME.
|
||||
# https://bugs.launchpad.net/wsme/+bug/1265590
|
||||
__name__ = name
|
||||
|
||||
def __str__(self):
|
||||
# These are the json serializable native types
|
||||
return ' | '.join(map(str, (wtypes.text, six.integer_types, float,
|
||||
BooleanType, list, dict, None)))
|
||||
|
||||
@staticmethod
|
||||
def validate(value):
|
||||
try:
|
||||
json.dumps(value)
|
||||
except TypeError:
|
||||
raise exception.Invalid(_('%s is not JSON serializable') % value)
|
||||
else:
|
||||
return value
|
||||
|
||||
@staticmethod
|
||||
def frombasetype(value):
|
||||
return JsonType.validate(value)
|
||||
|
||||
|
||||
macaddress = MacAddressType()
|
||||
uuid_or_name = UuidOrNameType()
|
||||
name = NameType()
|
||||
uuid = UuidType()
|
||||
boolean = BooleanType()
|
||||
# Can't call it 'json' because that's the name of the stdlib module
|
||||
jsontype = JsonType()
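# Illustrative usage only (not part of the original file): these singletons
# plug into expose/wsexpose signatures and coerce incoming strings, e.g.:
#
#     uuid.validate('not-a-uuid')    # raises exception.InvalidUUID
#     boolean.validate('True')       # -> True, via strutils.bool_from_string
#     macaddress.validate('AA:BB:CC:DD:EE:FF')
#     # -> the normalized form returned by utils.validate_and_normalize_mac()
#     #    (assumed to lowercase the address, as in upstream Ironic).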
|
||||
|
||||
|
||||
class JsonPatchType(wtypes.Base):
|
||||
"""A complex type that represents a single json-patch operation."""
|
||||
|
||||
path = wtypes.wsattr(wtypes.StringType(pattern='^(/[\w-]+)+$'),
|
||||
mandatory=True)
|
||||
op = wtypes.wsattr(wtypes.Enum(str, 'add', 'replace', 'remove'),
|
||||
mandatory=True)
|
||||
value = wsme.wsattr(jsontype, default=wtypes.Unset)
|
||||
|
||||
@staticmethod
|
||||
def internal_attrs():
|
||||
"""Returns a list of internal attributes.
|
||||
|
||||
Internal attributes can't be added, replaced or removed. This
|
||||
method may be overridden by a derived class.
|
||||
|
||||
"""
|
||||
return ['/created_at', '/id', '/links', '/updated_at', '/uuid']
|
||||
|
||||
@staticmethod
|
||||
def mandatory_attrs():
|
||||
"""Retruns a list of mandatory attributes.
|
||||
|
||||
Mandatory attributes can't be removed from the document. This
|
||||
method should be overridden by a derived class.
|
||||
|
||||
"""
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
def validate(patch):
|
||||
_path = '/' + patch.path.split('/')[1]
|
||||
if _path in patch.internal_attrs():
|
||||
msg = _("'%s' is an internal attribute and can not be updated")
|
||||
raise wsme.exc.ClientSideError(msg % patch.path)
|
||||
|
||||
if patch.path in patch.mandatory_attrs() and patch.op == 'remove':
|
||||
msg = _("'%s' is a mandatory attribute and can not be removed")
|
||||
raise wsme.exc.ClientSideError(msg % patch.path)
|
||||
|
||||
if patch.op != 'remove':
|
||||
if patch.value is wsme.Unset:
|
||||
msg = _("'add' and 'replace' operations needs value")
|
||||
raise wsme.exc.ClientSideError(msg)
|
||||
|
||||
ret = {'path': patch.path, 'op': patch.op}
|
||||
if patch.value is not wsme.Unset:
|
||||
ret['value'] = patch.value
|
||||
return ret
|
@ -1,107 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import jsonpatch
|
||||
from oslo_config import cfg
|
||||
from oslo_utils import uuidutils
|
||||
import pecan
|
||||
import wsme
|
||||
|
||||
from iotronic.common import exception
|
||||
from iotronic.common.i18n import _
|
||||
from iotronic.common import utils
|
||||
from iotronic import objects
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
JSONPATCH_EXCEPTIONS = (jsonpatch.JsonPatchException,
|
||||
jsonpatch.JsonPointerException,
|
||||
KeyError)
|
||||
|
||||
|
||||
def validate_limit(limit):
|
||||
if limit is None:
|
||||
return CONF.api.max_limit
|
||||
|
||||
if limit <= 0:
|
||||
raise wsme.exc.ClientSideError(_("Limit must be positive"))
|
||||
|
||||
return min(CONF.api.max_limit, limit)
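# Worked example (illustrative, assuming CONF.api.max_limit = 1000):
#   validate_limit(None)  -> 1000   (fall back to the configured maximum)
#   validate_limit(50)    -> 50
#   validate_limit(5000)  -> 1000   (capped at max_limit)
#   validate_limit(0)     -> ClientSideError("Limit must be positive")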
|
||||
|
||||
|
||||
def validate_sort_dir(sort_dir):
|
||||
if sort_dir not in ['asc', 'desc']:
|
||||
raise wsme.exc.ClientSideError(_("Invalid sort direction: %s. "
|
||||
"Acceptable values are "
|
||||
"'asc' or 'desc'") % sort_dir)
|
||||
return sort_dir
|
||||
|
||||
|
||||
def apply_jsonpatch(doc, patch):
|
||||
for p in patch:
|
||||
if p['op'] == 'add' and p['path'].count('/') == 1:
|
||||
if p['path'].lstrip('/') not in doc:
|
||||
msg = _('Adding a new attribute (%s) to the root of '
|
||||
'the resource is not allowed')
|
||||
raise wsme.exc.ClientSideError(msg % p['path'])
|
||||
return jsonpatch.apply_patch(doc, jsonpatch.JsonPatch(patch))
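# Minimal sketch of what apply_jsonpatch() delegates to the jsonpatch
# library (illustrative values only, not part of the original module):
#
#     doc = {'address': 'aa:bb:cc:dd:ee:ff', 'extra': {}}
#     patch = [{'op': 'replace', 'path': '/address',
#               'value': '11:22:33:44:55:66'}]
#     jsonpatch.apply_patch(doc, jsonpatch.JsonPatch(patch))
#     # -> {'address': '11:22:33:44:55:66', 'extra': {}}
#
# An 'add' with a single-segment path (e.g. '/new_field') is rejected by the
# loop above before jsonpatch ever sees it.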
|
||||
|
||||
|
||||
def get_patch_value(patch, path):
|
||||
for p in patch:
|
||||
if p['path'] == path:
|
||||
return p['value']
|
||||
|
||||
|
||||
def allow_node_logical_names():
|
||||
# v1.5 added logical name aliases
|
||||
return pecan.request.version.minor >= 5
|
||||
|
||||
|
||||
def get_rpc_node(node_ident):
|
||||
"""Get the RPC node from the node uuid or logical name.
|
||||
|
||||
:param node_ident: the UUID or logical name of a node.
|
||||
|
||||
:returns: The RPC Node.
|
||||
:raises: InvalidUuidOrName if the name or uuid provided is not valid.
|
||||
:raises: NodeNotFound if the node is not found.
|
||||
"""
|
||||
# Check to see if the node_ident is a valid UUID. If it is, treat it
|
||||
# as a UUID.
|
||||
if uuidutils.is_uuid_like(node_ident):
|
||||
return objects.Node.get_by_uuid(pecan.request.context, node_ident)
|
||||
|
||||
# We can refer to nodes by their name, if the client supports it
|
||||
if allow_node_logical_names():
|
||||
if utils.is_hostname_safe(node_ident):
|
||||
return objects.Node.get_by_name(pecan.request.context, node_ident)
|
||||
raise exception.InvalidUuidOrName(name=node_ident)
|
||||
|
||||
# Ensure we raise the same exception as we did for the Juno release
|
||||
raise exception.NodeNotFound(node=node_ident)
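# Illustrative behaviour (values below are examples, not from the commit):
#   get_rpc_node('1be26c0b-03f2-4d2e-ae87-c02d7f33c123')  # looked up by UUID
#   get_rpc_node('node-1')      # looked up by name, but only when the request
#                               # declares API version >= 1.5
#   get_rpc_node('not valid!')  # InvalidUuidOrName / NodeNotFound as above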
|
||||
|
||||
|
||||
def is_valid_node_name(name):
|
||||
"""Determine if the provided name is a valid node name.
|
||||
|
||||
Check to see that the provided node name is valid, and isn't a UUID.
|
||||
|
||||
:param name: the node name to check.
|
||||
:returns: True if the name is valid, False otherwise.
|
||||
"""
|
||||
return utils.is_hostname_safe(name) and (not uuidutils.is_uuid_like(name))
|
@ -41,8 +41,8 @@ class Collection(base.APIBase):
|
||||
resource_url = url or self._type
|
||||
q_args = ''.join(['%s=%s&' % (key, kwargs[key]) for key in kwargs])
|
||||
next_args = '?%(args)slimit=%(limit)d&marker=%(marker)s' % {
|
||||
'args': q_args, 'limit': limit,
|
||||
'marker': self.collection[-1].uuid}
|
||||
'args': q_args, 'limit': limit,
|
||||
'marker': self.collection[-1].uuid}
|
||||
|
||||
return link.Link.make_link('next', pecan.request.host_url,
|
||||
resource_url, next_args).href
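# Worked example (illustrative values): for a ports collection requested with
# limit=50 and sort_key='id', and a current page whose last element has uuid
# <uuid>, the 'next' href built above looks like:
#
#     http://<host>/ports?sort_key=id&limit=50&marker=<uuid>
#
# q_args supplies the extra query parameters, and the marker is always the
# uuid of the last item in the current page.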
|
||||
|
@ -1,14 +1,27 @@
|
||||
import pecan
|
||||
from wsme import types as wtypes
|
||||
import wsme
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from iotronic.api.controllers import base
|
||||
from iotronic import objects
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
|
||||
|
||||
class Location(base.APIBase):
|
||||
"""API representation of a location.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
longitude = wsme.wsattr(wtypes.text)
|
||||
latitude = wsme.wsattr(wtypes.text)
|
||||
altitude = wsme.wsattr(wtypes.text)
|
||||
@ -26,12 +39,12 @@ class Location(base.APIBase):
|
||||
|
||||
@staticmethod
|
||||
def convert_with_list(list):
|
||||
list_locations=[]
|
||||
list_locations = []
|
||||
for l in list:
|
||||
list_locations.append(Location(**l.as_dict()))
|
||||
return list_locations
|
||||
|
||||
'''
|
||||
'''
|
||||
class LocationCollection(collection.Collection):
|
||||
"""API representation of a collection of locations."""
|
||||
|
||||
@ -42,11 +55,11 @@ class LocationCollection(collection.Collection):
|
||||
self._type = 'locations'
|
||||
|
||||
@staticmethod
|
||||
def convert_with_locates(locations, limit, url=None, expand=False, **kwargs):
|
||||
def convert_with_locates(locations,
|
||||
limit, url=None, expand=False, **kwargs):
|
||||
collection = LocationCollection()
|
||||
collection.locations = [Location.convert_with_locates(n, expand) for n in locations]
|
||||
collection.locations = [Location.convert_with_locates(n, expand)
|
||||
for n in locations]
|
||||
collection.next = collection.get_next(limit, url=url, **kwargs)
|
||||
return collection
|
||||
'''
|
||||
|
||||
|
||||
|
@ -1,49 +1,64 @@
|
||||
from pecan import rest
|
||||
from iotronic.api import expose
|
||||
from wsme import types as wtypes
|
||||
from iotronic import objects
|
||||
from iotronic.api.controllers.v1 import types
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from iotronic.api.controllers import base
|
||||
from iotronic.api.controllers.v1 import collection
|
||||
from iotronic.api.controllers.v1 import location as loc
|
||||
from iotronic.api.controllers.v1 import types
|
||||
from iotronic.api.controllers.v1 import utils as api_utils
|
||||
from iotronic.api.controllers import base
|
||||
from iotronic.api import expose
|
||||
from iotronic.common import exception
|
||||
import wsme
|
||||
from iotronic import objects
|
||||
import pecan
|
||||
import code
|
||||
from pecan import rest
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
|
||||
|
||||
class Node(base.APIBase):
|
||||
"""API representation of a node.
|
||||
|
||||
"""
|
||||
|
||||
uuid = types.uuid
|
||||
code = wsme.wsattr(wtypes.text)
|
||||
status = wsme.wsattr(wtypes.text)
|
||||
name= wsme.wsattr(wtypes.text)
|
||||
device= wsme.wsattr(wtypes.text)
|
||||
session= wsme.wsattr(wtypes.text)
|
||||
mobile=types.boolean
|
||||
location=wsme.wsattr([loc.Location])
|
||||
extra=types.jsontype
|
||||
name = wsme.wsattr(wtypes.text)
|
||||
device = wsme.wsattr(wtypes.text)
|
||||
session = wsme.wsattr(wtypes.text)
|
||||
mobile = types.boolean
|
||||
location = wsme.wsattr([loc.Location])
|
||||
extra = types.jsontype
|
||||
|
||||
@staticmethod
|
||||
def _convert_with_locates(node, url, expand=True, show_password=True):
|
||||
|
||||
|
||||
try:
|
||||
session=objects.SessionWP({}).get_session_by_node_uuid(node.uuid,valid=True)
|
||||
node.session=session.session_id
|
||||
except:
|
||||
session = objects.SessionWP(
|
||||
{}).get_session_by_node_uuid(
|
||||
node.uuid, valid=True)
|
||||
node.session = session.session_id
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
if not expand:
|
||||
except_list = ['name', 'code', 'status','uuid','session']
|
||||
except_list = ['name', 'code', 'status', 'uuid', 'session']
|
||||
node.unset_fields_except(except_list)
|
||||
return node
|
||||
|
||||
list_loc=objects.Location({}).list_by_node_id({},node.id)
|
||||
node.location=loc.Location.convert_with_list(list_loc)
|
||||
|
||||
|
||||
list_loc = objects.Location({}).list_by_node_id({}, node.id)
|
||||
node.location = loc.Location.convert_with_list(list_loc)
|
||||
|
||||
'''
|
||||
else:
|
||||
if not show_password:
|
||||
@ -66,14 +81,14 @@ class Node(base.APIBase):
|
||||
]
|
||||
'''
|
||||
return node
|
||||
|
||||
|
||||
@classmethod
|
||||
def convert_with_locates(cls, rpc_node, expand=True):
|
||||
node = Node(**rpc_node.as_dict())
|
||||
node.id=rpc_node.id
|
||||
node.id = rpc_node.id
|
||||
return cls._convert_with_locates(node, pecan.request.host_url,
|
||||
expand,
|
||||
pecan.request.context.show_password)
|
||||
expand,
|
||||
pecan.request.context.show_password)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
self.fields = []
|
||||
@ -84,10 +99,11 @@ class Node(base.APIBase):
|
||||
continue
|
||||
self.fields.append(k)
|
||||
setattr(self, k, kwargs.get(k, wtypes.Unset))
|
||||
|
||||
|
||||
|
||||
class NodeCollection(collection.Collection):
|
||||
"""API representation of a collection of nodes."""
|
||||
|
||||
|
||||
nodes = [Node]
|
||||
"""A list containing nodes objects"""
|
||||
|
||||
@ -97,10 +113,13 @@ class NodeCollection(collection.Collection):
|
||||
@staticmethod
|
||||
def convert_with_locates(nodes, limit, url=None, expand=False, **kwargs):
|
||||
collection = NodeCollection()
|
||||
collection.nodes = [Node.convert_with_locates(n, expand) for n in nodes]
|
||||
collection.nodes = [
|
||||
Node.convert_with_locates(
|
||||
n, expand) for n in nodes]
|
||||
collection.next = collection.get_next(limit, url=url, **kwargs)
|
||||
return collection
|
||||
|
||||
|
||||
|
||||
class NodesController(rest.RestController):
|
||||
|
||||
invalid_sort_key_list = ['properties']
|
||||
@ -137,7 +156,7 @@ class NodesController(rest.RestController):
|
||||
nodes = objects.Node.list(pecan.request.context, limit, marker_obj,
|
||||
sort_key=sort_key, sort_dir=sort_dir,
|
||||
filters=filters)
|
||||
|
||||
|
||||
parameters = {'sort_key': sort_key, 'sort_dir': sort_dir}
|
||||
'''
|
||||
if associated:
|
||||
@ -146,10 +165,10 @@ class NodesController(rest.RestController):
|
||||
parameters['maintenance'] = maintenance
|
||||
'''
|
||||
return NodeCollection.convert_with_locates(nodes, limit,
|
||||
url=resource_url,
|
||||
expand=expand,
|
||||
**parameters)
|
||||
|
||||
url=resource_url,
|
||||
expand=expand,
|
||||
**parameters)
|
||||
|
||||
@expose.expose(NodeCollection, types.uuid, types.uuid, types.boolean,
|
||||
types.boolean, types.uuid, int, wtypes.text, wtypes.text)
|
||||
def get_all(self, chassis_uuid=None, instance_uuid=None, associated=None,
|
||||
@ -176,19 +195,17 @@ class NodesController(rest.RestController):
|
||||
associated, maintenance, marker,
|
||||
limit, sort_key, sort_dir)
|
||||
|
||||
|
||||
|
||||
@expose.expose(Node,types.uuid_or_name)
|
||||
def get(self,node_ident):
|
||||
@expose.expose(Node, types.uuid_or_name)
|
||||
def get(self, node_ident):
|
||||
"""Retrieve information about the given node.
|
||||
|
||||
:param node_ident: UUID or logical name of a node.
|
||||
"""
|
||||
rpc_node = api_utils.get_rpc_node(node_ident)
|
||||
node = Node(**rpc_node.as_dict())
|
||||
node.id=rpc_node.id
|
||||
node.id = rpc_node.id
|
||||
return Node.convert_with_locates(node)
|
||||
|
||||
|
||||
@expose.expose(None, types.uuid_or_name, status_code=204)
|
||||
def delete(self, node_ident):
|
||||
"""Delete a node.
|
||||
@ -205,11 +222,11 @@ class NodesController(rest.RestController):
|
||||
|
||||
pecan.request.rpcapi.destroy_node(pecan.request.context,
|
||||
rpc_node.uuid, topic)
|
||||
|
||||
|
||||
@expose.expose(Node, body=Node, status_code=201)
|
||||
def post(self,Node):
|
||||
def post(self, Node):
|
||||
"""Create a new Node.
|
||||
|
||||
|
||||
:param Node: a Node within the request body.
|
||||
"""
|
||||
if not Node.name:
|
||||
@ -218,25 +235,24 @@ class NodesController(rest.RestController):
|
||||
if not Node.code:
|
||||
raise exception.MissingParameterValue(
|
||||
_("Code is not specified."))
|
||||
if not Node.location:
|
||||
if not Node.location:
|
||||
raise exception.MissingParameterValue(
|
||||
_("Location is not specified."))
|
||||
|
||||
|
||||
if Node.name:
|
||||
if not api_utils.is_valid_node_name(Node.name):
|
||||
msg = _("Cannot create node with invalid name %(name)s")
|
||||
raise wsme.exc.ClientSideError(msg % {'name': Node.name},
|
||||
status_code=400)
|
||||
|
||||
status_code=400)
|
||||
|
||||
new_Node = objects.Node(pecan.request.context,
|
||||
**Node.as_dict())
|
||||
new_Node.create()
|
||||
|
||||
new_Location=objects.Location(pecan.request.context,
|
||||
**Node.location[0].as_dict())
|
||||
new_Location.node_id=new_Node.id
|
||||
new_Location.create()
|
||||
|
||||
#pecan.response.location = link.build_url('Nodes', new_Node.uuid)
|
||||
return Node.convert_with_locates(new_Node)
|
||||
|
||||
new_Location = objects.Location(pecan.request.context,
|
||||
**Node.location[0].as_dict())
|
||||
new_Location.node_id = new_Node.id
|
||||
new_Location.create()
|
||||
|
||||
# pecan.response.location = link.build_url('Nodes', new_Node.uuid)
|
||||
return Node.convert_with_locates(new_Node)
|
||||
|
@ -71,6 +71,7 @@ def allow_node_logical_names():
|
||||
# v1.5 added logical name aliases
|
||||
return pecan.request.version.minor >= 5
|
||||
|
||||
|
||||
def get_rpc_node(node_ident):
|
||||
"""Get the RPC node from the node uuid or logical name.
|
||||
|
||||
@ -94,6 +95,7 @@ def get_rpc_node(node_ident):
|
||||
# Ensure we raise the same exception as we did for the Juno release
|
||||
raise exception.NodeNotFound(node=node_ident)
|
||||
|
||||
|
||||
def is_valid_node_name(name):
|
||||
"""Determine if the provided name is a valid node name.
|
||||
|
||||
|
@ -23,7 +23,6 @@ from iotronic.common import policy
|
||||
|
||||
|
||||
from iotronic.conductor import rpcapi
|
||||
from iotronic.db import api as dbapi
|
||||
|
||||
|
||||
class ConfigHook(hooks.PecanHook):
|
||||
@ -37,8 +36,8 @@ class DBHook(hooks.PecanHook):
|
||||
"""Attach the dbapi object to the request so controllers can get to it."""
|
||||
|
||||
def before(self, state):
|
||||
|
||||
#state.request.dbapi = dbapi.get_instance()
|
||||
|
||||
# state.request.dbapi = dbapi.get_instance()
|
||||
pass
|
||||
|
||||
|
||||
@ -62,6 +61,7 @@ class ContextHook(hooks.PecanHook):
|
||||
or admin substring. Otherwise it is set to False.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, public_api_routes):
|
||||
self.public_api_routes = public_api_routes
|
||||
super(ContextHook, self).__init__()
|
||||
@ -112,6 +112,7 @@ class TrustedCallHook(hooks.PecanHook):
|
||||
tenant, domain or other administrative unit.
|
||||
|
||||
"""
|
||||
|
||||
def before(self, state):
|
||||
ctx = state.request.context
|
||||
if ctx.is_public_api:
|
||||
@ -132,6 +133,7 @@ class NoExceptionTracebackHook(hooks.PecanHook):
|
||||
# 'on_error' never fired for wsme+pecan pair. wsme @wsexpose decorator
|
||||
# catches and handles all the errors, so 'on_error' dedicated for unhandled
|
||||
# exceptions never fired.
|
||||
|
||||
def after(self, state):
|
||||
# Omit empty body. Some errors may not have body at this level yet.
|
||||
if not state.response.body:
|
||||
|
@ -31,6 +31,7 @@ class AuthTokenMiddleware(auth_token.AuthProtocol):
|
||||
for public routes in the API.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, app, conf, public_api_routes=[]):
|
||||
# TODO(mrda): Remove .xml and ensure that doesn't result in a
|
||||
# 401 Authentication Required instead of 404 Not Found
|
||||
|
@ -35,6 +35,7 @@ LOG = log.getLogger(__name__)
|
||||
|
||||
class ParsableErrorMiddleware(object):
|
||||
"""Replace error body with something the client can parse."""
|
||||
|
||||
def __init__(self, app):
|
||||
self.app = app
|
||||
|
||||
@ -69,7 +70,7 @@ class ParsableErrorMiddleware(object):
|
||||
if (state['status_code'] // 100) not in (2, 3):
|
||||
req = webob.Request(environ)
|
||||
if (req.accept.best_match(['application/json', 'application/xml'])
|
||||
== 'application/xml'):
|
||||
== 'application/xml'):
|
||||
try:
|
||||
# simple check xml is valid
|
||||
body = [et.ElementTree.tostring(
|
||||
|
@ -26,6 +26,6 @@ def parse_args(argv, default_config_files=None):
|
||||
cfg.CONF(argv[1:],
|
||||
project='iotronic',
|
||||
version=version.version_info.release_string(),
|
||||
#version='2015.7',
|
||||
# version='2015.7',
|
||||
default_config_files=default_config_files)
|
||||
rpc.init(cfg.CONF)
|
||||
|
@ -122,8 +122,7 @@ def generate(argv):
|
||||
opt_list.append((ext.name, opts))
|
||||
|
||||
for pkg_name in pkg_names:
|
||||
mods = mods_by_pkg.get(pkg_name)
|
||||
mods.sort()
|
||||
mods = sorted(mods_by_pkg.get(pkg_name))
|
||||
for mod_str in mods:
|
||||
if mod_str.endswith('.__init__'):
|
||||
mod_str = mod_str[:mod_str.rfind(".")]
|
||||
|
@ -26,18 +26,20 @@ from iotronic.common import utils
|
||||
from iotronic.openstack.common import loopingcall
|
||||
|
||||
opts = [
|
||||
cfg.IntOpt('check_device_interval',
|
||||
default=1,
|
||||
help='After Iotronic has completed creating the partition table, '
|
||||
'it continues to check for activity on the attached iSCSI '
|
||||
'device status at this interval prior to copying the image'
|
||||
' to the node, in seconds'),
|
||||
cfg.IntOpt('check_device_max_retries',
|
||||
default=20,
|
||||
help='The maximum number of times to check that the device is '
|
||||
'not accessed by another process. If the device is still '
|
||||
'busy after that, the disk partitioning will be treated as'
|
||||
' having failed.'),
|
||||
cfg.IntOpt(
|
||||
'check_device_interval',
|
||||
default=1,
|
||||
help='After Iotronic has completed creating the partition table, '
|
||||
'it continues to check for activity on the attached iSCSI '
|
||||
'device status at this interval prior to copying the image'
|
||||
' to the node, in seconds'),
|
||||
cfg.IntOpt(
|
||||
'check_device_max_retries',
|
||||
default=20,
|
||||
help='The maximum number of times to check that the device is '
|
||||
'not accessed by another process. If the device is still '
|
||||
'busy after that, the disk partitioning will be treated as'
|
||||
' having failed.'),
|
||||
]
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
@ -159,12 +159,15 @@ class InstanceAssociated(Conflict):
|
||||
message = _("Instance %(instance_uuid)s is already associated with a node,"
|
||||
" it cannot be associated with this other node %(node)s")
|
||||
|
||||
|
||||
class DuplicateName(Conflict):
|
||||
message = _("A node with name %(name)s already exists.")
|
||||
|
||||
|
||||
class DuplicateCode(Conflict):
|
||||
message = _("A node with code %(code)s already exists.")
|
||||
|
||||
|
||||
class InvalidUUID(Invalid):
|
||||
message = _("Expected a uuid but received %(uuid)s.")
|
||||
|
||||
@ -573,4 +576,3 @@ class PathNotFound(IotronicException):
|
||||
|
||||
class DirectoryNotWritable(IotronicException):
|
||||
message = _("Directory %(dir)s is not writable.")
|
||||
|
||||
|
@ -32,6 +32,7 @@ from iotronic.common.i18n import _
|
||||
|
||||
class _Jump(object):
|
||||
"""A FSM transition tracks this data while jumping."""
|
||||
|
||||
def __init__(self, name, on_enter, on_exit):
|
||||
self.name = name
|
||||
self.on_enter = on_enter
|
||||
@ -44,6 +45,7 @@ class FSM(object):
|
||||
This class models a state machine, and expects an outside caller to
|
||||
manually trigger the state changes one at a time by invoking process_event
|
||||
"""
|
||||
|
||||
def __init__(self, start_state=None):
|
||||
self._transitions = {}
|
||||
self._states = OrderedDict()
|
||||
|
@ -240,7 +240,6 @@ class BaseImageService(object):
|
||||
@check_image_service
|
||||
def _update(self, image_id, image_meta, data=None, method='update',
|
||||
purge_props=False):
|
||||
|
||||
"""Modify the given image with the new data.
|
||||
|
||||
:param image_id: The opaque image identifier.
|
||||
|
@ -84,8 +84,9 @@ def import_versioned_module(version, submodule=None):
|
||||
def GlanceImageService(client=None, version=1, context=None):
|
||||
module = import_versioned_module(version, 'image_service')
|
||||
service_class = getattr(module, 'GlanceImageService')
|
||||
if (context is not None and CONF.glance.auth_strategy == 'keystone'
|
||||
and not context.auth_token):
|
||||
if (context is not None
|
||||
and CONF.glance.auth_strategy == 'keystone'
|
||||
and not context.auth_token):
|
||||
context.auth_token = keystone.get_admin_auth_token()
|
||||
return service_class(client, version, context)
|
||||
|
||||
|
@ -512,7 +512,7 @@ def is_whole_disk_image(ctx, instance_info):
|
||||
else:
|
||||
# Non glance image ref
|
||||
if (not instance_info.get('kernel') and
|
||||
not instance_info.get('ramdisk')):
|
||||
not instance_info.get('ramdisk')):
|
||||
is_whole_disk_image = True
|
||||
|
||||
return is_whole_disk_image
|
||||
|
@ -23,12 +23,10 @@ path_opts = [
|
||||
cfg.StrOpt('pybasedir',
|
||||
default=os.path.abspath(os.path.join(os.path.dirname(__file__),
|
||||
'../')),
|
||||
help='Directory where the iotronic python module is installed.'),
|
||||
cfg.StrOpt('bindir',
|
||||
default='$pybasedir/bin',
|
||||
help='Directory where the iotronic module is installed.'),
|
||||
cfg.StrOpt('bindir', default='$pybasedir/bin',
|
||||
help='Directory where iotronic binaries are installed.'),
|
||||
cfg.StrOpt('state_path',
|
||||
default='$pybasedir',
|
||||
cfg.StrOpt('state_path', default='$pybasedir',
|
||||
help="Top-level directory for maintaining iotronic's state."),
|
||||
]
|
||||
|
||||
|
@ -237,7 +237,7 @@ def clean_up_pxe_config(task):
|
||||
# see: https://bugs.launchpad.net/iotronic/+bug/1441710
|
||||
if CONF.pxe.ipxe_enabled:
|
||||
utils.unlink_without_raise(_get_pxe_mac_path(mac,
|
||||
delimiter=''))
|
||||
delimiter=''))
|
||||
|
||||
utils.rmtree_without_raise(os.path.join(get_root_dir(),
|
||||
task.node.uuid))
|
||||
|
@ -13,6 +13,13 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_config import cfg
|
||||
import oslo_messaging as messaging
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from iotronic.common import context as iotronic_context
|
||||
from iotronic.common import exception
|
||||
|
||||
__all__ = [
|
||||
'init',
|
||||
'cleanup',
|
||||
@ -27,16 +34,8 @@ __all__ = [
|
||||
'TRANSPORT_ALIASES',
|
||||
]
|
||||
|
||||
from oslo_config import cfg
|
||||
import oslo_messaging as messaging
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from iotronic.common import context as iotronic_context
|
||||
from iotronic.common import exception
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
#print CONF.transport_url
|
||||
# print CONF.transport_url
|
||||
TRANSPORT = None
|
||||
NOTIFIER = None
|
||||
|
||||
@ -92,6 +91,7 @@ def get_allowed_exmods():
|
||||
|
||||
|
||||
class JsonPayloadSerializer(messaging.NoOpSerializer):
|
||||
|
||||
@staticmethod
|
||||
def serialize_entity(context, entity):
|
||||
return jsonutils.to_primitive(entity, convert_instances=True)
|
||||
@ -120,7 +120,7 @@ class RequestContextSerializer(messaging.Serializer):
|
||||
|
||||
|
||||
def get_transport_url(url_str=None):
|
||||
#LOG.info('yoooooooooooo')
|
||||
# LOG.info('yoooooooooooo')
|
||||
return messaging.TransportURL.parse(CONF, url_str, TRANSPORT_ALIASES)
|
||||
|
||||
|
||||
|
@ -117,7 +117,7 @@ class RPCService(service.Service):
|
||||
|
||||
def prepare_service(argv=[]):
|
||||
log.register_options(cfg.CONF)
|
||||
|
||||
|
||||
log.set_defaults(default_log_levels=['amqp=WARN',
|
||||
'amqplib=WARN',
|
||||
'qpid.messaging=INFO',
|
||||
|
@ -153,7 +153,7 @@ INSPECTFAIL = 'inspect failed'
|
||||
UPDATE_ALLOWED_STATES = (DEPLOYFAIL, INSPECTING, INSPECTFAIL, CLEANFAIL)
|
||||
"""Transitional states in which we allow updating a node."""
|
||||
|
||||
### NEW
|
||||
# NEW
|
||||
OPERATIVE = 'operative'
|
||||
MAINTENANCE = 'maintenance'
|
||||
|
||||
|
@ -257,7 +257,7 @@ def is_valid_cidr(address):
|
||||
ip_segment = address.split('/')
|
||||
|
||||
if (len(ip_segment) <= 1 or
|
||||
ip_segment[1] == ''):
|
||||
ip_segment[1] == ''):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
File diff suppressed because it is too large
@ -1,362 +0,0 @@
|
||||
# coding=utf-8
|
||||
|
||||
# Copyright 2013 Hewlett-Packard Development Company, L.P.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
A context manager to perform a series of tasks on a set of resources.
|
||||
|
||||
:class:`TaskManager` is a context manager, created on-demand to allow
|
||||
synchronized access to a node and its resources.
|
||||
|
||||
The :class:`TaskManager` will, by default, acquire an exclusive lock on
|
||||
a node for the duration that the TaskManager instance exists. You may
|
||||
create a TaskManager instance without locking by passing "shared=True"
|
||||
when creating it, but certain operations on the resources held by such
|
||||
an instance of TaskManager will not be possible. Requiring this exclusive
|
||||
lock guards against parallel operations interfering with each other.
|
||||
|
||||
A shared lock is useful when performing non-interfering operations,
|
||||
such as validating the driver interfaces.
|
||||
|
||||
An exclusive lock is stored in the database to coordinate between
|
||||
:class:`iotronic.iotconductor.manager` instances, which are typically deployed on
|
||||
different hosts.
|
||||
|
||||
:class:`TaskManager` methods, as well as driver methods, may be decorated to
|
||||
determine whether their invocation requires an exclusive lock.
|
||||
|
||||
The TaskManager instance exposes certain node resources and properties as
|
||||
attributes that you may access:
|
||||
|
||||
task.context
|
||||
The context passed to TaskManager()
|
||||
task.shared
|
||||
False if Node is locked, True if it is not locked. (The
|
||||
'shared' kwarg of TaskManager())
|
||||
task.node
|
||||
The Node object
|
||||
task.ports
|
||||
Ports belonging to the Node
|
||||
task.driver
|
||||
The Driver for the Node, or the Driver based on the
|
||||
'driver_name' kwarg of TaskManager().
|
||||
|
||||
Example usage:
|
||||
|
||||
::
|
||||
|
||||
with task_manager.acquire(context, node_id) as task:
|
||||
task.driver.power.power_on(task.node)
|
||||
|
||||
If you need to execute task-requiring code in a background thread, the
|
||||
TaskManager instance provides an interface to handle this for you, making
|
||||
sure to release resources when the thread finishes (successfully or if
|
||||
an exception occurs). Common use of this is within the Manager like so:
|
||||
|
||||
::
|
||||
|
||||
with task_manager.acquire(context, node_id) as task:
|
||||
<do some work>
|
||||
task.spawn_after(self._spawn_worker,
|
||||
utils.node_power_action, task, new_state)
|
||||
|
||||
All exceptions that occur in the current GreenThread as part of the
|
||||
spawn handling are re-raised. You can specify a hook to execute custom
|
||||
code when such exceptions occur. For example, the hook is a more elegant
|
||||
solution than wrapping the "with task_manager.acquire()" with a
|
||||
try..except block. (Note that this hook does not handle exceptions
|
||||
raised in the background thread.):
|
||||
|
||||
::
|
||||
|
||||
def on_error(e):
|
||||
if isinstance(e, Exception):
|
||||
...
|
||||
|
||||
with task_manager.acquire(context, node_id) as task:
|
||||
<do some work>
|
||||
task.set_spawn_error_hook(on_error)
|
||||
task.spawn_after(self._spawn_worker,
|
||||
utils.node_power_action, task, new_state)
|
||||
|
||||
"""
|
||||
|
||||
import functools
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils import excutils
|
||||
import retrying
|
||||
|
||||
from iotronic.common import driver_factory
|
||||
from iotronic.common import exception
|
||||
from iotronic.common.i18n import _LW
|
||||
from iotronic.common import states
|
||||
from iotronic import objects
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
def require_exclusive_lock(f):
|
||||
"""Decorator to require an exclusive lock.
|
||||
|
||||
Decorated functions must take a :class:`TaskManager` as the first
|
||||
parameter. Decorated class methods should take a :class:`TaskManager`
|
||||
as the first parameter after "self".
|
||||
|
||||
"""
|
||||
@functools.wraps(f)
|
||||
def wrapper(*args, **kwargs):
|
||||
task = args[0] if isinstance(args[0], TaskManager) else args[1]
|
||||
if task.shared:
|
||||
raise exception.ExclusiveLockRequired()
|
||||
return f(*args, **kwargs)
|
||||
return wrapper
|
||||
|
||||
|
||||
def acquire(context, node_id, shared=False, driver_name=None):
|
||||
"""Shortcut for acquiring a lock on a Node.
|
||||
|
||||
:param context: Request context.
|
||||
:param node_id: ID or UUID of node to lock.
|
||||
:param shared: Boolean indicating whether to take a shared or exclusive
|
||||
lock. Default: False.
|
||||
:param driver_name: Name of Driver. Default: None.
|
||||
:returns: An instance of :class:`TaskManager`.
|
||||
|
||||
"""
|
||||
return TaskManager(context, node_id, shared=shared,
|
||||
driver_name=driver_name)
|
||||
|
||||
|
||||
class TaskManager(object):
|
||||
"""Context manager for tasks.
|
||||
|
||||
This class wraps the locking, driver loading, and acquisition
|
||||
of related resources (eg, Node and Ports) when beginning a unit of work.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, context, node_id, shared=False, driver_name=None):
|
||||
"""Create a new TaskManager.
|
||||
|
||||
Acquire a lock on a node. The lock can be either shared or
|
||||
exclusive. Shared locks may be used for read-only or
|
||||
non-disruptive actions only, and must be considerate to what
|
||||
other threads may be doing on the same node at the same time.
|
||||
|
||||
:param context: request context
|
||||
:param node_id: ID or UUID of node to lock.
|
||||
:param shared: Boolean indicating whether to take a shared or exclusive
|
||||
lock. Default: False.
|
||||
:param driver_name: The name of the driver to load, if different
|
||||
from the Node's current driver.
|
||||
:raises: DriverNotFound
|
||||
:raises: NodeNotFound
|
||||
:raises: NodeLocked
|
||||
|
||||
"""
|
||||
|
||||
self._spawn_method = None
|
||||
self._on_error_method = None
|
||||
|
||||
self.context = context
|
||||
self.node = None
|
||||
self.shared = shared
|
||||
|
||||
self.fsm = states.machine.copy()
|
||||
|
||||
# NodeLocked exceptions can be annoying. Let's try to alleviate
|
||||
# some of that pain by retrying our lock attempts. The retrying
|
||||
# module expects a wait_fixed value in milliseconds.
|
||||
@retrying.retry(
|
||||
retry_on_exception=lambda e: isinstance(e, exception.NodeLocked),
|
||||
stop_max_attempt_number=CONF.conductor.node_locked_retry_attempts,
|
||||
wait_fixed=CONF.conductor.node_locked_retry_interval * 1000)
|
||||
def reserve_node():
|
||||
LOG.debug("Attempting to reserve node %(node)s",
|
||||
{'node': node_id})
|
||||
self.node = objects.Node.reserve(context, CONF.host, node_id)
|
||||
|
||||
try:
|
||||
if not self.shared:
|
||||
reserve_node()
|
||||
else:
|
||||
self.node = objects.Node.get(context, node_id)
|
||||
#self.ports = objects.Port.list_by_node_id(context, self.node.id)
|
||||
#self.driver = driver_factory.get_driver(driver_name or
|
||||
# self.node.driver)
|
||||
|
||||
# NOTE(deva): this handles the Juno-era NOSTATE state
|
||||
# and should be deleted after Kilo is released
|
||||
'''
|
||||
if self.node.provision_state is states.NOSTATE:
|
||||
self.node.provision_state = states.AVAILABLE
|
||||
self.node.save()
|
||||
|
||||
self.fsm.initialize(self.node.provision_state)
|
||||
'''
|
||||
except Exception:
|
||||
with excutils.save_and_reraise_exception():
|
||||
self.release_resources()
|
||||
|
||||
def spawn_after(self, _spawn_method, *args, **kwargs):
|
||||
"""Call this to spawn a thread to complete the task.
|
||||
|
||||
The specified method will be called when the TaskManager instance
|
||||
exits.
|
||||
|
||||
:param _spawn_method: a method that returns a GreenThread object
|
||||
:param args: args passed to the method.
|
||||
:param kwargs: additional kwargs passed to the method.
|
||||
|
||||
"""
|
||||
self._spawn_method = _spawn_method
|
||||
self._spawn_args = args
|
||||
self._spawn_kwargs = kwargs
|
||||
|
||||
def set_spawn_error_hook(self, _on_error_method, *args, **kwargs):
|
||||
"""Create a hook to handle exceptions when spawning a task.
|
||||
|
||||
Create a hook that gets called upon an exception being raised
|
||||
from spawning a background thread to do a task.
|
||||
|
||||
:param _on_error_method: a callable object, its first parameter
|
||||
should accept the Exception object that was raised.
|
||||
:param args: additional args passed to the callable object.
|
||||
:param kwargs: additional kwargs passed to the callable object.
|
||||
|
||||
"""
|
||||
self._on_error_method = _on_error_method
|
||||
self._on_error_args = args
|
||||
self._on_error_kwargs = kwargs
|
||||
|
||||
def release_resources(self):
|
||||
"""Unlock a node and release resources.
|
||||
|
||||
If an exclusive lock is held, unlock the node. Reset attributes
|
||||
to make it clear that this instance of TaskManager should no
|
||||
longer be accessed.
|
||||
"""
|
||||
|
||||
if not self.shared:
|
||||
try:
|
||||
if self.node:
|
||||
objects.Node.release(self.context, CONF.host, self.node.id)
|
||||
except exception.NodeNotFound:
|
||||
# squelch the exception if the node was deleted
|
||||
# within the task's context.
|
||||
pass
|
||||
self.node = None
|
||||
self.driver = None
|
||||
self.ports = None
|
||||
self.fsm = None
|
||||
|
||||
def _thread_release_resources(self, t):
|
||||
"""Thread.link() callback to release resources."""
|
||||
self.release_resources()
|
||||
|
||||
def process_event(self, event, callback=None, call_args=None,
|
||||
call_kwargs=None, err_handler=None):
|
||||
"""Process the given event for the task's current state.
|
||||
|
||||
:param event: the name of the event to process
|
||||
:param callback: optional callback to invoke upon event transition
|
||||
:param call_args: optional \*args to pass to the callback method
|
||||
:param call_kwargs: optional \**kwargs to pass to the callback method
|
||||
:param err_handler: optional error handler to invoke if the
|
||||
callback fails, eg. because there are no workers available
|
||||
(err_handler should accept arguments node, prev_prov_state, and
|
||||
prev_target_state)
|
||||
:raises: InvalidState if the event is not allowed by the associated
|
||||
state machine
|
||||
"""
|
||||
# Advance the state model for the given event. Note that this doesn't
|
||||
# alter the node in any way. This may raise InvalidState, if this event
|
||||
# is not allowed in the current state.
|
||||
self.fsm.process_event(event)
|
||||
|
||||
# stash current states in the error handler if callback is set,
|
||||
# in case we fail to get a worker from the pool
|
||||
if err_handler and callback:
|
||||
self.set_spawn_error_hook(err_handler, self.node,
|
||||
self.node.provision_state,
|
||||
self.node.target_provision_state)
|
||||
|
||||
self.node.provision_state = self.fsm.current_state
|
||||
self.node.target_provision_state = self.fsm.target_state
|
||||
|
||||
# set up the async worker
|
||||
if callback:
|
||||
# clear the error if we're going to start work in a callback
|
||||
self.node.last_error = None
|
||||
if call_args is None:
|
||||
call_args = ()
|
||||
if call_kwargs is None:
|
||||
call_kwargs = {}
|
||||
self.spawn_after(callback, *call_args, **call_kwargs)
|
||||
|
||||
# publish the state transition by saving the Node
|
||||
self.node.save()
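# Illustrative call pattern only (this mirrors how the Ironic conductor this
# code derives from drives the method; the names below are placeholders, not
# part of this module):
#
#     with task_manager.acquire(context, node_id) as task:
#         task.process_event(
#             'deploy',
#             callback=self._spawn_worker,
#             call_args=(do_node_deploy, task),
#             err_handler=provisioning_error_handler)
#
# The FSM transition is validated first; the callback is only spawned (via
# spawn_after) after the new provision_state has been recorded on the node.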
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
if exc_type is None and self._spawn_method is not None:
|
||||
# Spawn a worker to complete the task
|
||||
# The linked callback below will be called whenever:
|
||||
# - background task finished with no errors.
|
||||
# - background task has crashed with exception.
|
||||
# - callback was added after the background task has
|
||||
# finished or crashed. While eventlet currently doesn't
|
||||
# schedule the new thread until the current thread blocks
|
||||
# for some reason, this is true.
|
||||
# All of the above are asserted in tests such that we'll
|
||||
# catch if eventlet ever changes this behavior.
|
||||
thread = None
|
||||
try:
|
||||
thread = self._spawn_method(*self._spawn_args,
|
||||
**self._spawn_kwargs)
|
||||
|
||||
# NOTE(comstud): Trying to use a lambda here causes
|
||||
# the callback to not occur for some reason. This
|
||||
# also makes it easier to test.
|
||||
thread.link(self._thread_release_resources)
|
||||
# Don't unlock! The unlock will occur when the
|
||||
# thread finishes.
|
||||
return
|
||||
except Exception as e:
|
||||
with excutils.save_and_reraise_exception():
|
||||
try:
|
||||
# Execute the on_error hook if set
|
||||
if self._on_error_method:
|
||||
self._on_error_method(e, *self._on_error_args,
|
||||
**self._on_error_kwargs)
|
||||
except Exception:
|
||||
LOG.warning(_LW("Task's on_error hook failed to "
|
||||
"call %(method)s on node %(node)s"),
|
||||
{'method': self._on_error_method.__name__,
|
||||
'node': self.node.uuid})
|
||||
|
||||
if thread is not None:
|
||||
# This means the link() failed for some
|
||||
# reason. Nuke the thread.
|
||||
thread.cancel()
|
||||
self.release_resources()
|
||||
self.release_resources()
|
@ -1,160 +0,0 @@
|
||||
# coding=utf-8
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_log import log
|
||||
from oslo_utils import excutils
|
||||
|
||||
from iotronic.common import exception
|
||||
from iotronic.common.i18n import _
|
||||
from iotronic.common.i18n import _LI
|
||||
from iotronic.common.i18n import _LW
|
||||
from iotronic.common import states
|
||||
from iotronic.conductor import task_manager
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
@task_manager.require_exclusive_lock
|
||||
def node_set_boot_device(task, device, persistent=False):
|
||||
"""Set the boot device for a node.
|
||||
|
||||
:param task: a TaskManager instance.
|
||||
:param device: Boot device. Values are vendor-specific.
|
||||
:param persistent: Whether to set next-boot, or make the change
|
||||
permanent. Default: False.
|
||||
:raises: InvalidParameterValue if the validation of the
|
||||
ManagementInterface fails.
|
||||
|
||||
"""
|
||||
if getattr(task.driver, 'management', None):
|
||||
task.driver.management.validate(task)
|
||||
task.driver.management.set_boot_device(task,
|
||||
device=device,
|
||||
persistent=persistent)
|
||||
|
||||
|
||||
@task_manager.require_exclusive_lock
|
||||
def node_power_action(task, new_state):
|
||||
"""Change power state or reset for a node.
|
||||
|
||||
Perform the requested power action if the transition is required.
|
||||
|
||||
:param task: a TaskManager instance containing the node to act on.
|
||||
:param new_state: Any power state from iotronic.common.states. If the
|
||||
state is 'REBOOT' then a reboot will be attempted, otherwise
|
||||
the node power state is directly set to 'state'.
|
||||
:raises: InvalidParameterValue when the wrong state is specified
|
||||
or the wrong driver info is specified.
|
||||
:raises: other exceptions by the node's power driver if something
|
||||
wrong occurred during the power action.
|
||||
|
||||
"""
|
||||
node = task.node
|
||||
target_state = states.POWER_ON if new_state == states.REBOOT else new_state
|
||||
|
||||
if new_state != states.REBOOT:
|
||||
try:
|
||||
curr_state = task.driver.power.get_power_state(task)
|
||||
except Exception as e:
|
||||
with excutils.save_and_reraise_exception():
|
||||
node['last_error'] = _(
|
||||
"Failed to change power state to '%(target)s'. "
|
||||
"Error: %(error)s") % {'target': new_state, 'error': e}
|
||||
node['target_power_state'] = states.NOSTATE
|
||||
node.save()
|
||||
|
||||
if curr_state == new_state:
|
||||
# Neither the iotronic service nor the hardware has erred. The
|
||||
# node is, for some reason, already in the requested state,
|
||||
# though we don't know why. eg, perhaps the user previously
|
||||
# requested the node POWER_ON, the network delayed those IPMI
|
||||
# packets, and they are trying again -- but the node finally
|
||||
# responds to the first request, and so the second request
|
||||
# gets to this check and stops.
|
||||
# This isn't an error, so we'll clear last_error field
|
||||
# (from previous operation), log a warning, and return.
|
||||
node['last_error'] = None
|
||||
# NOTE(dtantsur): under rare conditions we can get out of sync here
|
||||
node['power_state'] = new_state
|
||||
node['target_power_state'] = states.NOSTATE
|
||||
node.save()
|
||||
LOG.warn(_LW("Not going to change_node_power_state because "
|
||||
"current state = requested state = '%(state)s'."),
|
||||
{'state': curr_state})
|
||||
return
|
||||
|
||||
if curr_state == states.ERROR:
|
||||
# be optimistic and continue action
|
||||
LOG.warn(_LW("Driver returns ERROR power state for node %s."),
|
||||
node.uuid)
|
||||
|
||||
# Set the target_power_state and clear any last_error, if we're
|
||||
# starting a new operation. This will expose to other processes
|
||||
# and clients that work is in progress.
|
||||
if node['target_power_state'] != target_state:
|
||||
node['target_power_state'] = target_state
|
||||
node['last_error'] = None
|
||||
node.save()
|
||||
|
||||
# take power action
|
||||
try:
|
||||
if new_state != states.REBOOT:
|
||||
task.driver.power.set_power_state(task, new_state)
|
||||
else:
|
||||
task.driver.power.reboot(task)
|
||||
except Exception as e:
|
||||
with excutils.save_and_reraise_exception():
|
||||
node['last_error'] = _(
|
||||
"Failed to change power state to '%(target)s'. "
|
||||
"Error: %(error)s") % {'target': target_state, 'error': e}
|
||||
else:
|
||||
# success!
|
||||
node['power_state'] = target_state
|
||||
LOG.info(_LI('Successfully set node %(node)s power state to '
|
||||
'%(state)s.'),
|
||||
{'node': node.uuid, 'state': target_state})
|
||||
finally:
|
||||
node['target_power_state'] = states.NOSTATE
|
||||
node.save()
|
||||
|
||||
|
||||
@task_manager.require_exclusive_lock
|
||||
def cleanup_after_timeout(task):
|
||||
"""Cleanup deploy task after timeout.
|
||||
|
||||
:param task: a TaskManager instance.
|
||||
"""
|
||||
node = task.node
|
||||
msg = (_('Timeout reached while waiting for callback for node %s')
|
||||
% node.uuid)
|
||||
node.last_error = msg
|
||||
LOG.error(msg)
|
||||
node.save()
|
||||
|
||||
error_msg = _('Cleanup failed for node %(node)s after deploy timeout: '
|
||||
' %(error)s')
|
||||
try:
|
||||
task.driver.deploy.clean_up(task)
|
||||
except exception.IotronicException as e:
|
||||
msg = error_msg % {'node': node.uuid, 'error': e}
|
||||
LOG.error(msg)
|
||||
node.last_error = msg
|
||||
node.save()
|
||||
except Exception as e:
|
||||
msg = error_msg % {'node': node.uuid, 'error': e}
|
||||
LOG.error(msg)
|
||||
node.last_error = _('Deploy timed out, but an unhandled exception was '
|
||||
'encountered while aborting. More info may be '
|
||||
'found in the log file.')
|
||||
node.save()
|
@ -1,5 +1,4 @@
# coding=utf-8
from time import sleep

# Copyright 2013 Hewlett-Packard Development Company, L.P.
# Copyright 2013 International Business Machines Corporation
@ -18,8 +17,8 @@ from time import sleep
# under the License.
"""Conduct all activity related to bare-metal deployments.

A single instance of :py:class:`iotronic.iotconductor.manager.ConductorManager` is
created within the *iotronic-conductor* process, and is responsible for
A single instance of :py:class:`iotronic.iotconductor.manager.ConductorManager`
is created within the *iotronic-conductor* process, and is responsible for
performing all actions on bare metal resources (Chassis, Nodes, and Ports).
Commands are received via RPCs. The conductor service also performs periodic
tasks, eg. to monitor the status of active deployments.
@ -41,36 +40,38 @@ Rebalancing this ring can trigger various actions by each conductor, such as
building or tearing down the TFTP environment for a node, notifying Neutron of
a change, etc.
"""
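The ring mentioned in this docstring partitions nodes across conductors so each conductor only works on the nodes hashed to it. A minimal illustrative sketch of that idea follows; the class and method names are assumptions for illustration only, not the API of iotronic.common.hash_ring.

# Illustrative only: a tiny consistent-hash ring mapping node UUIDs to
# conductor hostnames; not the project's actual hash_ring implementation.
import bisect
import hashlib


class SimpleHashRing(object):
    def __init__(self, hosts, replicas=64):
        # Each host gets several points on the ring to smooth the distribution.
        self._ring = sorted(
            (self._hash('%s-%d' % (host, i)), host)
            for host in hosts for i in range(replicas))
        self._keys = [key for key, _ in self._ring]

    @staticmethod
    def _hash(value):
        return int(hashlib.md5(value.encode('utf-8')).hexdigest(), 16)

    def get_host(self, node_uuid):
        # Walk clockwise to the first ring point at or after the node's hash.
        idx = bisect.bisect(self._keys, self._hash(node_uuid)) % len(self._ring)
        return self._ring[idx][1]


ring = SimpleHashRing(['conductor-1', 'conductor-2'])
print(ring.get_host('6f8b7c2e-0000-4000-8000-000000000001'))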
'''
|
||||
import collections
|
||||
|
||||
import datetime
|
||||
import eventlet
|
||||
from eventlet import greenpool
|
||||
import inspect
|
||||
import tempfile
|
||||
|
||||
from iotronic.conductor import utils
|
||||
from iotronic.db import api as dbapi
|
||||
|
||||
from iotronic.common import dhcp_factory
|
||||
|
||||
|
||||
from iotronic.common import exception
|
||||
from iotronic.common.glance_service import service_utils as glance_utils
|
||||
|
||||
from iotronic.common import hash_ring as hash
|
||||
from iotronic.common import images
|
||||
from iotronic.common import rpc
|
||||
|
||||
from iotronic.common.i18n import _
|
||||
from iotronic.common.i18n import _LC
|
||||
from iotronic.common.i18n import _LE
|
||||
from iotronic.common.i18n import _LI
|
||||
from iotronic.common.i18n import _LW
|
||||
# from iotronic.common import driver_factory
|
||||
|
||||
|
||||
from iotronic.common import states
|
||||
from iotronic.common import swift
|
||||
from iotronic.iotconductor import task_manager
|
||||
from iotronic.iotconductor import utils
|
||||
|
||||
from iotronic.conductor import task_manager
|
||||
from iotronic import objects
|
||||
|
||||
from iotronic.openstack.common import periodic_task
|
||||
'''
|
||||
|
||||
import threading
|
||||
import eventlet
|
||||
from eventlet import greenpool
|
||||
|
||||
from iotronic.db import api as dbapi
|
||||
from oslo_config import cfg
|
||||
from oslo_db import exception as db_exception
|
||||
from oslo_concurrency import lockutils
|
||||
from oslo_config import cfg
|
||||
from oslo_db import exception as db_exception
|
||||
@ -78,21 +79,20 @@ from oslo_log import log
|
||||
import oslo_messaging as messaging
|
||||
from oslo_utils import excutils
|
||||
from oslo_utils import uuidutils
|
||||
from iotronic.conductor import utils
|
||||
from iotronic import objects
|
||||
|
||||
from iotronic.common import hash_ring as hash
|
||||
from iotronic.common.i18n import _
|
||||
from iotronic.common.i18n import _LC
|
||||
from iotronic.common.i18n import _LE
|
||||
from iotronic.common.i18n import _LI
|
||||
from iotronic.common.i18n import _LW
|
||||
#from iotronic.common import driver_factory
|
||||
import threading
|
||||
|
||||
from iotronic.conductor import task_manager
|
||||
'''
|
||||
import collections
|
||||
|
||||
from iotronic.common import rpc
|
||||
from iotronic.common import states
|
||||
|
||||
from iotronic.iotconductor import task_manager
|
||||
from iotronic.iotconductor import utils
|
||||
|
||||
from iotronic.openstack.common import periodic_task
|
||||
from iotronic.common import exception
|
||||
'''
|
||||
|
||||
MANAGER_TOPIC = 'iotronic.conductor_manager'
|
||||
WORKER_SPAWN_lOCK = "conductor_worker_spawn"
|
||||
@ -602,10 +602,9 @@ def set_node_cleaning_steps(task):
|
||||
node.driver_internal_info = driver_internal_info
|
||||
node.clean_step = {}
|
||||
node.save()
|
||||
|
||||
|
||||
|
||||
##################### NEW
|
||||
|
||||
|
||||
# NEW
|
||||
|
||||
|
||||
class ConductorManager(periodic_task.PeriodicTasks):
|
||||
@ -623,8 +622,8 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
self.host = host
|
||||
self.topic = topic
|
||||
self.drivers = ['fake']
|
||||
#self.power_state_sync_count = collections.defaultdict(int)
|
||||
#self.notifier = rpc.get_notifier()
|
||||
# self.power_state_sync_count = collections.defaultdict(int)
|
||||
# self.notifier = rpc.get_notifier()
|
||||
'''
|
||||
def _get_driver(self, driver_name):
|
||||
"""Get the driver.
|
||||
@ -640,6 +639,7 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
except KeyError:
|
||||
raise exception.DriverNotFound(driver_name=driver_name)
|
||||
'''
|
||||
|
||||
def init_host(self):
|
||||
self.dbapi = dbapi.get_instance()
|
||||
|
||||
@ -655,10 +655,10 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
|
||||
# NOTE(deva): instantiating DriverFactory may raise DriverLoadError
|
||||
# or DriverNotFound
|
||||
#self._driver_factory = driver_factory.DriverFactory()
|
||||
#"""Driver factory loads all enabled drivers."""
|
||||
|
||||
#self.drivers = self._driver_factory.names
|
||||
# self._driver_factory = driver_factory.DriverFactory()
|
||||
# """Driver factory loads all enabled drivers."""
|
||||
|
||||
# self.drivers = self._driver_factory.names
|
||||
"""List of driver names which this conductor supports."""
|
||||
'''
|
||||
if not self.drivers:
|
||||
@ -678,20 +678,21 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
if iface:
|
||||
self._collect_periodic_tasks(iface)
|
||||
'''
|
||||
|
||||
|
||||
# clear all locks held by this conductor before registering
|
||||
#self.dbapi.clear_node_reservations_for_conductor(self.host)
|
||||
# self.dbapi.clear_node_reservations_for_conductor(self.host)
|
||||
try:
|
||||
# Register this conductor with the cluster
|
||||
cdr = self.dbapi.register_conductor({'hostname': self.host,'drivers': ['fake']})
|
||||
cdr = self.dbapi.register_conductor(
|
||||
{'hostname': self.host, 'drivers': ['fake']})
|
||||
except exception.ConductorAlreadyRegistered:
|
||||
# This conductor was already registered and did not shut down
|
||||
# properly, so log a warning and update the record.
|
||||
LOG.warn(_LW("A conductor with hostname %(hostname)s "
|
||||
"was previously registered. Updating registration"),
|
||||
{'hostname': self.host})
|
||||
|
||||
#TO BE CHANGED
|
||||
|
||||
# TO BE CHANGED
|
||||
cdr = self.dbapi.register_conductor({'hostname': self.host,
|
||||
'drivers': self.drivers},
|
||||
update_existing=True)
|
||||
@ -707,11 +708,9 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
with excutils.save_and_reraise_exception():
|
||||
LOG.critical(_LC('Failed to start keepalive'))
|
||||
self.del_host()
|
||||
|
||||
from iotronic.wamp.rpcwampserver import RPC_Wamp_Server
|
||||
RPC_Wamp_Server()
|
||||
|
||||
|
||||
# from iotronic.wamp.rpcwampserver import RPC_Wamp_Server
|
||||
# RPC_Wamp_Server()
|
||||
|
||||
def _collect_periodic_tasks(self, obj):
|
||||
for n, method in inspect.getmembers(obj, inspect.ismethod):
|
||||
@ -746,7 +745,6 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
|
||||
@lockutils.synchronized(WORKER_SPAWN_lOCK, 'iotronic-')
def _spawn_worker(self, func, *args, **kwargs):

"""Create a greenthread to run func(*args, **kwargs).

Spawns a greenthread if there are free slots in pool, otherwise raises
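A rough sketch of the spawn-if-free pattern this docstring describes, assuming an eventlet GreenPool attribute named _worker_pool; the exception type raised when the pool is exhausted is an assumption, not necessarily the one iotronic uses.

# Sketch only: spawn work on a greenthread while the pool has free slots.
from eventlet import greenpool


class WorkerMixin(object):
    def __init__(self, pool_size=100):
        self._worker_pool = greenpool.GreenPool(size=pool_size)

    def _spawn_worker(self, func, *args, **kwargs):
        if self._worker_pool.free():
            # Returns a GreenThread; callers may attach a link() callback.
            return self._worker_pool.spawn(func, *args, **kwargs)
        raise RuntimeError('No free worker slots available')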
@ -1434,7 +1432,7 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
action=action, node=task.node.uuid,
|
||||
state=task.node.provision_state)
|
||||
|
||||
#@periodic_task.periodic_task(spacing=CONF.conductor.sync_power_state_interval)
|
||||
# @periodic_task.periodic_task(spacing=CONF.conductor.sync_power_state_interval)
|
||||
def _sync_power_states(self, context):
|
||||
"""Periodic task to sync power states for the nodes.
|
||||
|
||||
@ -1502,7 +1500,7 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
# Yield on every iteration
|
||||
eventlet.sleep(0)
|
||||
|
||||
#@periodic_task.periodic_task(spacing=CONF.conductor.check_provision_state_interval)
|
||||
# @periodic_task.periodic_task(spacing=CONF.conductor.check_provision_state_interval)
|
||||
def _check_deploy_timeouts(self, context):
|
||||
"""Periodically checks whether a deploy RPC call has timed out.
|
||||
|
||||
@ -1542,7 +1540,7 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
task.node.conductor_affinity = self.conductor.id
|
||||
task.node.save()
|
||||
|
||||
#@periodic_task.periodic_task(spacing=CONF.conductor.sync_local_state_interval)
|
||||
# @periodic_task.periodic_task(spacing=CONF.conductor.sync_local_state_interval)
|
||||
def _sync_local_state(self, context):
|
||||
"""Perform any actions necessary to sync local state.
|
||||
|
||||
@ -1616,7 +1614,7 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
nodes
|
||||
:return: generator yielding tuples of requested fields
|
||||
"""
|
||||
columns = ['uuid',] + list(fields or ())
|
||||
columns = ['uuid', ] + list(fields or ())
|
||||
node_list = self.dbapi.get_nodeinfo_list(columns=columns, **kwargs)
|
||||
for result in node_list:
|
||||
if self._mapped_to_this_conductor(*result[:2]):
|
||||
@ -1667,9 +1665,6 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
ret_dict[iface_name]['reason'] = reason
|
||||
return ret_dict
|
||||
|
||||
|
||||
|
||||
|
||||
@messaging.expected_exceptions(exception.NodeLocked,
|
||||
exception.NodeAssociated,
|
||||
exception.InvalidState)
|
||||
@ -1691,7 +1686,7 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
node.destroy()
|
||||
LOG.info(_LI('Successfully deleted node %(node)s.'),
|
||||
{'node': node.uuid})
|
||||
#if node.instance_uuid is not None:
|
||||
# if node.instance_uuid is not None:
|
||||
# raise exception.NodeAssociated(node=node.uuid,
|
||||
# instance=node.instance_uuid)
|
||||
|
||||
@ -1904,7 +1899,7 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
driver = self._get_driver(driver_name)
|
||||
return driver.get_properties()
|
||||
|
||||
#@periodic_task.periodic_task(spacing=CONF.conductor.send_sensor_data_interval)
|
||||
# @periodic_task.periodic_task(spacing=CONF.conductor.send_sensor_data_interval)
|
||||
def _send_sensor_data(self, context):
|
||||
"""Periodically sends sensor data to Ceilometer."""
|
||||
# do nothing if send_sensor_data option is False
|
||||
@ -2128,7 +2123,7 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
action='inspect', node=task.node.uuid,
|
||||
state=task.node.provision_state)
|
||||
|
||||
#@periodic_task.periodic_task(spacing=CONF.conductor.check_provision_state_interval)
|
||||
# @periodic_task.periodic_task(spacing=CONF.conductor.check_provision_state_interval)
|
||||
def _check_inspect_timeouts(self, context):
|
||||
"""Periodically checks inspect_timeout and fails upon reaching it.
|
||||
|
||||
@ -2185,7 +2180,7 @@ class ConductorManager(periodic_task.PeriodicTasks):
|
||||
try:
|
||||
with task_manager.acquire(context, node_uuid) as task:
|
||||
if (task.node.maintenance or
|
||||
task.node.provision_state != provision_state):
|
||||
task.node.provision_state != provision_state):
|
||||
continue
|
||||
|
||||
# timeout has been reached - process the event 'fail'
|
||||
|
@ -22,9 +22,7 @@ import random
|
||||
|
||||
import oslo_messaging as messaging
|
||||
|
||||
from iotronic.common import exception
|
||||
from iotronic.common import hash_ring
|
||||
from iotronic.common.i18n import _
|
||||
from iotronic.common import rpc
|
||||
from iotronic.conductor import manager
|
||||
from iotronic.objects import base as objects_base
|
||||
@ -78,9 +76,7 @@ class ConductorAPI(object):
|
||||
'driver %s.') % node.driver)
|
||||
raise exception.NoValidHost(reason=reason)
|
||||
'''
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def get_topic_for_driver(self, driver_name):
|
||||
"""Get RPC topic name for a conductor supporting the given driver.
|
||||
|
||||
|
@ -32,8 +32,8 @@ A shared lock is useful when performing non-interfering operations,
such as validating the driver interfaces.

An exclusive lock is stored in the database to coordinate between
:class:`iotronic.iotconductor.manager` instances, that are typically deployed on
different hosts.
:class:`iotronic.iotconductor.manager` instances,
that are typically deployed on different hosts.

:class:`TaskManager` methods, as well as driver methods, may be decorated to
determine whether their invocation requires an exclusive lock.
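A rough usage sketch of the two locking pieces described above, built from the acquire() context manager and the require_exclusive_lock decorator that appear elsewhere in this change; signatures are simplified assumptions.

# Sketch only: take a (default, exclusive) lock on a node and call a
# function that insists on that lock being held.
from iotronic.conductor import task_manager


@task_manager.require_exclusive_lock
def change_node_state(task, new_state):
    # Runs only when the calling TaskManager holds an exclusive lock.
    task.node.status = new_state
    task.node.save()


def handle_rpc(context, node_uuid, new_state):
    # acquire() yields a TaskManager; exclusive acquisition is recorded in
    # the database so conductors on other hosts cannot write concurrently.
    with task_manager.acquire(context, node_uuid) as task:
        change_node_state(task, new_state)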
@ -101,7 +101,6 @@ from oslo_log import log as logging
|
||||
from oslo_utils import excutils
|
||||
import retrying
|
||||
|
||||
from iotronic.common import driver_factory
|
||||
from iotronic.common import exception
|
||||
from iotronic.common.i18n import _LW
|
||||
from iotronic.common import states
|
||||
@ -176,7 +175,7 @@ class TaskManager(object):
|
||||
self._on_error_method = None
|
||||
|
||||
self.context = context
|
||||
#self.node = None
|
||||
# self.node = None
|
||||
self.node = None
|
||||
self.shared = shared
|
||||
|
||||
@ -201,8 +200,8 @@ class TaskManager(object):
|
||||
else:
|
||||
"""
|
||||
self.node = objects.Node.get(context, node_id)
|
||||
#self.ports = objects.Port.list_by_node_id(context, self.node.id)
|
||||
#self.driver = driver_factory.get_driver(driver_name or
|
||||
# self.ports = objects.Port.list_by_node_id(context, self.node.id)
|
||||
# self.driver = driver_factory.get_driver(driver_name or
|
||||
# self.node.driver)
|
||||
|
||||
# NOTE(deva): this handles the Juno-era NOSTATE state
|
||||
@ -256,7 +255,7 @@ class TaskManager(object):
|
||||
to make it clear that this instance of TaskManager should no
|
||||
longer be accessed.
|
||||
"""
|
||||
pass #don't need it at the moment
|
||||
pass # don't need it at the moment
|
||||
"""
|
||||
if not self.shared:
|
||||
try:
|
||||
@ -357,7 +356,7 @@ class TaskManager(object):
|
||||
LOG.warning(_LW("Task's on_error hook failed to "
|
||||
"call %(method)s on node %(node)s"),
|
||||
{'method': self._on_error_method.__name__,
|
||||
'node': self.node.uuid})
|
||||
'node': self.node.uuid})
|
||||
|
||||
if thread is not None:
|
||||
# This means the link() failed for some
|
||||
|
@ -177,7 +177,7 @@ class Connection(object):
|
||||
:returns: A node.
|
||||
"""
|
||||
'''
|
||||
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_node_by_code(self, instance):
|
||||
"""Return a node.
|
||||
@ -343,7 +343,7 @@ class Connection(object):
|
||||
:param chassis_id: The id or the uuid of a chassis.
|
||||
"""
|
||||
'''
|
||||
|
||||
|
||||
@abc.abstractmethod
|
||||
def register_conductor(self, values, update_existing=False):
|
||||
"""Register an active conductor with the cluster.
|
||||
@ -405,9 +405,7 @@ class Connection(object):
|
||||
"""
|
||||
|
||||
|
||||
|
||||
|
||||
###################### NEW #############################
|
||||
# ##################### NEW #############################
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_session(self, values):
|
||||
@ -415,7 +413,7 @@ class Connection(object):
|
||||
|
||||
:param values: session_id.
|
||||
"""
|
||||
|
||||
|
||||
@abc.abstractmethod
|
||||
def update_session(self, session_id, values):
|
||||
"""Update properties of an session.
|
||||
@ -423,15 +421,15 @@ class Connection(object):
|
||||
:param session_id: The id of a session.
|
||||
:param values: Dict of values to update.
|
||||
:returns: A session.
|
||||
"""
|
||||
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_location(self, values):
|
||||
"""Create a new location.
|
||||
|
||||
:param values: Dict of values.
|
||||
"""
|
||||
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def update_location(self, location_id, values):
|
||||
"""Update properties of an location.
|
||||
@ -440,8 +438,7 @@ class Connection(object):
|
||||
:param values: Dict of values to update.
|
||||
:returns: A location.
|
||||
"""
|
||||
|
||||
|
||||
|
||||
@abc.abstractmethod
|
||||
def destroy_location(self, location_id):
|
||||
"""Destroy an location.
|
||||
@ -451,7 +448,7 @@ class Connection(object):
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_locations_by_node_id(self, node_id, limit=None, marker=None,
|
||||
sort_key=None, sort_dir=None):
|
||||
sort_key=None, sort_dir=None):
|
||||
"""List all the locations for a given node.
|
||||
|
||||
:param node_id: The integer node ID.
|
||||
|
@ -28,7 +28,7 @@ import sqlalchemy as sa
|
||||
|
||||
def upgrade():
|
||||
op.add_column('nodes', sa.Column('provision_updated_at', sa.DateTime(),
|
||||
nullable=True))
|
||||
nullable=True))
|
||||
|
||||
|
||||
def downgrade():
|
||||
|
@ -103,4 +103,4 @@ def upgrade():
|
||||
|
||||
def downgrade():
|
||||
raise NotImplementedError(('Downgrade from initial migration is'
|
||||
' unsupported.'))
|
||||
' unsupported.'))
|
||||
|
@ -28,7 +28,7 @@ import sqlalchemy as sa
|
||||
|
||||
def upgrade():
|
||||
op.add_column('nodes', sa.Column('name', sa.String(length=63),
|
||||
nullable=True))
|
||||
nullable=True))
|
||||
op.create_unique_constraint('uniq_nodes0name', 'nodes', ['name'])
|
||||
|
||||
|
||||
|
@ -28,7 +28,7 @@ import sqlalchemy as sa
|
||||
|
||||
def upgrade():
|
||||
op.add_column('nodes', sa.Column('clean_step', sa.Text(),
|
||||
nullable=True))
|
||||
nullable=True))
|
||||
|
||||
|
||||
def downgrade():
|
||||
|
@ -41,7 +41,7 @@ AVAILABLE = 'available'
def upgrade():
op.execute(
node.update().where(
node.c.provision_state == None).values(
node.c.provision_state is None).values(
{'provision_state': op.inline_literal(AVAILABLE)}))

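One caveat around where() clauses like the one above: SQLAlchemy renders the SQL IS NULL test by overloading == and != on column objects, so a plain Python "is None" comparison evaluates to a boolean and adds no filter to the statement. The portable, flake8-clean spelling is .is_(None) (and .isnot(None) for the negation). A minimal sketch reusing the names from the migration above:

# Sketch only: emits "WHERE provision_state IS NULL" without tripping E711.
op.execute(
    node.update().where(
        node.c.provision_state.is_(None)
    ).values({'provision_state': op.inline_literal(AVAILABLE)}))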
@ -149,8 +149,9 @@ def _paginate_query(model, limit=None, marker=None, sort_key=None,
|
||||
% {'key': sort_key})
|
||||
return query.all()
|
||||
|
||||
#### NEW
|
||||
|
||||
# NEW
|
||||
|
||||
|
||||
def add_location_filter_by_node(query, value):
|
||||
if strutils.is_int_like(value):
|
||||
return query.filter_by(node_id=value)
|
||||
@ -159,6 +160,7 @@ def add_location_filter_by_node(query, value):
|
||||
models.Location.node_id == models.Node.id)
|
||||
return query.filter(models.Node.uuid == value)
|
||||
|
||||
|
||||
class Connection(api.Connection):
|
||||
"""SqlAlchemy connection."""
|
||||
|
||||
@ -176,9 +178,9 @@ class Connection(api.Connection):
|
||||
query = query.filter_by(chassis_id=chassis_obj.id)
|
||||
if 'associated' in filters:
|
||||
if filters['associated']:
|
||||
query = query.filter(models.Node.instance_uuid != None)
|
||||
query = query.filter(models.Node.instance_uuid is not None)
|
||||
else:
|
||||
query = query.filter(models.Node.instance_uuid == None)
|
||||
query = query.filter(models.Node.instance_uuid is None)
|
||||
"""
|
||||
if 'reserved' in filters:
|
||||
if filters['reserved']:
|
||||
@ -264,12 +266,13 @@ class Connection(api.Connection):
|
||||
except NoResultFound:
|
||||
raise exception.NodeNotFound(node_id)
|
||||
"""
|
||||
|
||||
def create_node(self, values):
|
||||
# ensure defaults are present for new nodes
|
||||
if 'uuid' not in values:
|
||||
values['uuid'] = uuidutils.generate_uuid()
|
||||
if 'status' not in values:
|
||||
values['status'] = states.OPERATIVE
|
||||
values['status'] = states.OPERATIVE
|
||||
|
||||
node = models.Node()
|
||||
node.update(values)
|
||||
@ -301,7 +304,7 @@ class Connection(api.Connection):
|
||||
return query.one()
|
||||
except NoResultFound:
|
||||
raise exception.NodeNotFound(node=node_name)
|
||||
|
||||
|
||||
def get_node_by_code(self, node_code):
|
||||
query = model_query(models.Node).filter_by(code=node_code)
|
||||
try:
|
||||
@ -323,8 +326,9 @@ class Connection(api.Connection):
|
||||
|
||||
return result
|
||||
'''
|
||||
|
||||
def destroy_node(self, node_id):
|
||||
|
||||
|
||||
session = get_session()
|
||||
with session.begin():
|
||||
query = model_query(models.Node, session=session)
|
||||
@ -338,9 +342,10 @@ class Connection(api.Connection):
|
||||
# required for deleting all ports, attached to the node.
|
||||
if uuidutils.is_uuid_like(node_id):
|
||||
node_id = node_ref['id']
|
||||
|
||||
|
||||
location_query = model_query(models.Location, session=session)
|
||||
location_query = add_location_filter_by_node(location_query, node_id)
|
||||
location_query = add_location_filter_by_node(
|
||||
location_query, node_id)
|
||||
location_query.delete()
|
||||
|
||||
query.delete()
|
||||
@ -366,7 +371,7 @@ class Connection(api.Connection):
|
||||
|
||||
query.delete()
|
||||
"""
|
||||
|
||||
|
||||
def update_node(self, node_id, values):
|
||||
# NOTE(dtantsur): this can lead to very strange errors
|
||||
if 'uuid' in values:
|
||||
@ -560,6 +565,7 @@ class Connection(api.Connection):
|
||||
if count != 1:
|
||||
raise exception.ChassisNotFound(chassis=chassis_id)
|
||||
"""
|
||||
|
||||
def register_conductor(self, values, update_existing=False):
|
||||
session = get_session()
|
||||
with session.begin():
|
||||
@ -641,8 +647,7 @@ class Connection(api.Connection):
|
||||
return d2c
|
||||
|
||||
|
||||
|
||||
###################### NEW #############################
|
||||
# ##################### NEW #############################
|
||||
def create_session(self, values):
|
||||
session = models.SessionWP()
|
||||
session.update(values)
|
||||
@ -667,7 +672,7 @@ class Connection(api.Connection):
|
||||
location.update(values)
|
||||
location.save()
|
||||
return location
|
||||
|
||||
|
||||
def update_location(self, location_id, values):
|
||||
# NOTE(dtantsur): this can lead to very strange errors
|
||||
session = get_session()
|
||||
@ -680,7 +685,7 @@ class Connection(api.Connection):
|
||||
except NoResultFound:
|
||||
raise exception.LocationNotFound(location=location_id)
|
||||
return ref
|
||||
|
||||
|
||||
def destroy_location(self, location_id):
|
||||
session = get_session()
|
||||
with session.begin():
|
||||
@ -689,24 +694,27 @@ class Connection(api.Connection):
|
||||
count = query.delete()
|
||||
if count == 0:
|
||||
raise exception.LocationNotFound(location=location_id)
|
||||
|
||||
|
||||
def get_locations_by_node_id(self, node_id, limit=None, marker=None,
|
||||
sort_key=None, sort_dir=None):
|
||||
sort_key=None, sort_dir=None):
|
||||
query = model_query(models.Location)
|
||||
query = query.filter_by(node_id=node_id)
|
||||
return _paginate_query(models.Location, limit, marker,
|
||||
sort_key, sort_dir, query)
|
||||
|
||||
|
||||
def get_session_by_node_uuid(self, node_uuid, valid):
|
||||
query = model_query(models.SessionWP).filter_by(node_uuid=node_uuid).filter_by(valid=valid)
|
||||
query = model_query(
|
||||
models.SessionWP).filter_by(
|
||||
node_uuid=node_uuid).filter_by(
|
||||
valid=valid)
|
||||
try:
|
||||
return query.one()
|
||||
except NoResultFound:
|
||||
return None
|
||||
|
||||
|
||||
def get_session_by_session_id(self, session_id):
|
||||
query = model_query(models.SessionWP).filter_by(session_id=session_id)
|
||||
try:
|
||||
return query.one()
|
||||
except NoResultFound:
|
||||
return None
|
||||
return None
|
||||
|
@ -24,12 +24,13 @@ from oslo_config import cfg
|
||||
from oslo_db import options as db_options
|
||||
from oslo_db.sqlalchemy import models
|
||||
import six.moves.urllib.parse as urlparse
|
||||
from sqlalchemy import Boolean, Column, DateTime
|
||||
from sqlalchemy import Boolean
|
||||
from sqlalchemy import Column
|
||||
from sqlalchemy import ForeignKey, Integer
|
||||
from sqlalchemy import schema, String, Text
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy import schema
|
||||
from sqlalchemy import String
|
||||
from sqlalchemy.types import TypeDecorator, TEXT
|
||||
|
||||
from iotronic.common import paths
|
||||
|
||||
|
||||
@ -39,7 +40,8 @@ sql_opts = [
|
||||
help='MySQL engine to use.')
|
||||
]
|
||||
|
||||
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('iotronic.sqlite')
|
||||
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + \
|
||||
paths.state_path_def('iotronic.sqlite')
|
||||
|
||||
|
||||
cfg.CONF.register_opts(sql_opts, 'database')
|
||||
@ -89,7 +91,7 @@ class JSONEncodedList(JsonEncodedType):
|
||||
|
||||
|
||||
class IotronicBase(models.TimestampMixin,
|
||||
models.ModelBase):
|
||||
models.ModelBase):
|
||||
|
||||
metadata = None
|
||||
|
||||
@ -142,7 +144,7 @@ class Node(Base):
|
||||
"""Represents a Node."""
|
||||
|
||||
__tablename__ = 'nodes'
|
||||
|
||||
|
||||
__table_args__ = (
|
||||
schema.UniqueConstraint('uuid', name='uniq_nodes0uuid'),
|
||||
schema.UniqueConstraint('code', name='uniq_nodes0code'),
|
||||
@ -155,8 +157,7 @@ class Node(Base):
|
||||
device = Column(String(255))
|
||||
session = Column(String(255), nullable=True)
|
||||
mobile = Column(Boolean, default=False)
|
||||
#location = Column(JSONEncodedDict)
|
||||
extra = Column(JSONEncodedDict)
|
||||
extra = Column(JSONEncodedDict)
|
||||
"""
|
||||
__tablename__ = 'nodes'
|
||||
'''
|
||||
@ -212,6 +213,7 @@ class Node(Base):
|
||||
#extra = Column(JSONEncodedDict)
|
||||
"""
|
||||
|
||||
|
||||
class Location(Base):
|
||||
"""Represents a location of a node."""
|
||||
|
||||
@ -224,13 +226,18 @@ class Location(Base):
|
||||
altitude = Column(String(18), nullable=True)
|
||||
node_id = Column(Integer, ForeignKey('nodes.id'))
|
||||
|
||||
|
||||
class SessionWP(Base):
|
||||
"""Represents a session of a node."""
|
||||
|
||||
__tablename__ = 'sessions'
|
||||
__table_args__ = (
|
||||
schema.UniqueConstraint('session_id', name='uniq_session_id0session_id'),
|
||||
schema.UniqueConstraint('node_uuid', name='uniq_node_uuid0node_uuid'),
|
||||
schema.UniqueConstraint(
|
||||
'session_id',
|
||||
name='uniq_session_id0session_id'),
|
||||
schema.UniqueConstraint(
|
||||
'node_uuid',
|
||||
name='uniq_node_uuid0node_uuid'),
|
||||
table_args())
|
||||
id = Column(Integer, primary_key=True)
|
||||
valid = Column(Boolean, default=True)
|
||||
@ -238,6 +245,7 @@ class SessionWP(Base):
|
||||
node_uuid = Column(String(36))
|
||||
node_id = Column(Integer, ForeignKey('nodes.id'))
|
||||
|
||||
|
||||
class Port(Base):
|
||||
"""Represents a network port of a bare metal node."""
|
||||
|
||||
@ -251,4 +259,3 @@ class Port(Base):
|
||||
address = Column(String(18))
|
||||
node_id = Column(Integer, ForeignKey('nodes.id'), nullable=True)
|
||||
extra = Column(JSONEncodedDict)
|
||||
|
||||
|
@ -12,26 +12,26 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
#from iotronic.objects import chassis
|
||||
# from iotronic.objects import chassis
|
||||
from iotronic.objects import conductor
|
||||
from iotronic.objects import node
|
||||
from iotronic.objects import location
|
||||
from iotronic.objects import node
|
||||
from iotronic.objects import sessionwp
|
||||
#from iotronic.objects import port
|
||||
# from iotronic.objects import port
|
||||
|
||||
|
||||
#Chassis = chassis.Chassis
|
||||
# Chassis = chassis.Chassis
|
||||
Conductor = conductor.Conductor
|
||||
Node = node.Node
|
||||
Location = location.Location
|
||||
SessionWP=sessionwp.SessionWP
|
||||
#Port = port.Port
|
||||
SessionWP = sessionwp.SessionWP
|
||||
# Port = port.Port
|
||||
|
||||
__all__ = (
|
||||
#Chassis,
|
||||
Conductor,
|
||||
Node,
|
||||
Location,
|
||||
SessionWP,
|
||||
#Port
|
||||
)
|
||||
# Chassis,
|
||||
Conductor,
|
||||
Node,
|
||||
Location,
|
||||
SessionWP,
|
||||
# Port
|
||||
)
|
||||
|
@ -1,186 +0,0 @@
|
||||
# coding=utf-8
|
||||
#
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_utils import strutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from iotronic.common import exception
|
||||
from iotronic.db import api as dbapi
|
||||
from iotronic.objects import base
|
||||
from iotronic.objects import utils as obj_utils
|
||||
|
||||
|
||||
class Chassis(base.IotronicObject):
|
||||
# Version 1.0: Initial version
|
||||
# Version 1.1: Add get() and get_by_id() and make get_by_uuid()
|
||||
# only work with a uuid
|
||||
# Version 1.2: Add create() and destroy()
|
||||
# Version 1.3: Add list()
|
||||
VERSION = '1.3'
|
||||
|
||||
dbapi = dbapi.get_instance()
|
||||
|
||||
fields = {
|
||||
'id': int,
|
||||
'uuid': obj_utils.str_or_none,
|
||||
'extra': obj_utils.dict_or_none,
|
||||
'description': obj_utils.str_or_none,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _from_db_object(chassis, db_chassis):
|
||||
"""Converts a database entity to a formal :class:`Chassis` object.
|
||||
|
||||
:param chassis: An object of :class:`Chassis`.
|
||||
:param db_chassis: A DB model of a chassis.
|
||||
:return: a :class:`Chassis` object.
|
||||
"""
|
||||
for field in chassis.fields:
|
||||
chassis[field] = db_chassis[field]
|
||||
|
||||
chassis.obj_reset_changes()
|
||||
return chassis
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get(cls, context, chassis_id):
|
||||
"""Find a chassis based on its id or uuid and return a Chassis object.
|
||||
|
||||
:param chassis_id: the id *or* uuid of a chassis.
|
||||
:returns: a :class:`Chassis` object.
|
||||
"""
|
||||
if strutils.is_int_like(chassis_id):
|
||||
return cls.get_by_id(context, chassis_id)
|
||||
elif uuidutils.is_uuid_like(chassis_id):
|
||||
return cls.get_by_uuid(context, chassis_id)
|
||||
else:
|
||||
raise exception.InvalidIdentity(identity=chassis_id)
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get_by_id(cls, context, chassis_id):
|
||||
"""Find a chassis based on its integer id and return a Chassis object.
|
||||
|
||||
:param chassis_id: the id of a chassis.
|
||||
:returns: a :class:`Chassis` object.
|
||||
"""
|
||||
db_chassis = cls.dbapi.get_chassis_by_id(chassis_id)
|
||||
chassis = Chassis._from_db_object(cls(context), db_chassis)
|
||||
return chassis
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get_by_uuid(cls, context, uuid):
|
||||
"""Find a chassis based on uuid and return a :class:`Chassis` object.
|
||||
|
||||
:param uuid: the uuid of a chassis.
|
||||
:param context: Security context
|
||||
:returns: a :class:`Chassis` object.
|
||||
"""
|
||||
db_chassis = cls.dbapi.get_chassis_by_uuid(uuid)
|
||||
chassis = Chassis._from_db_object(cls(context), db_chassis)
|
||||
return chassis
|
||||
|
||||
@base.remotable_classmethod
|
||||
def list(cls, context, limit=None, marker=None,
|
||||
sort_key=None, sort_dir=None):
|
||||
"""Return a list of Chassis objects.
|
||||
|
||||
:param context: Security context.
|
||||
:param limit: maximum number of resources to return in a single result.
|
||||
:param marker: pagination marker for large data sets.
|
||||
:param sort_key: column to sort results by.
|
||||
:param sort_dir: direction to sort. "asc" or "desc".
|
||||
:returns: a list of :class:`Chassis` object.
|
||||
|
||||
"""
|
||||
db_chassis = cls.dbapi.get_chassis_list(limit=limit,
|
||||
marker=marker,
|
||||
sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
return [Chassis._from_db_object(cls(context), obj)
|
||||
for obj in db_chassis]
|
||||
|
||||
@base.remotable
|
||||
def create(self, context=None):
|
||||
"""Create a Chassis record in the DB.
|
||||
|
||||
Column-wise updates will be made based on the result of
|
||||
self.what_changed(). If target_power_state is provided,
|
||||
it will be checked against the in-database copy of the
|
||||
chassis before updates are made.
|
||||
|
||||
:param context: Security context. NOTE: This should only
|
||||
be used internally by the indirection_api.
|
||||
Unfortunately, RPC requires context as the first
|
||||
argument, even though we don't use it.
|
||||
A context should be set when instantiating the
|
||||
object, e.g.: Chassis(context)
|
||||
|
||||
"""
|
||||
values = self.obj_get_changes()
|
||||
db_chassis = self.dbapi.create_chassis(values)
|
||||
self._from_db_object(self, db_chassis)
|
||||
|
||||
@base.remotable
|
||||
def destroy(self, context=None):
|
||||
"""Delete the Chassis from the DB.
|
||||
|
||||
:param context: Security context. NOTE: This should only
|
||||
be used internally by the indirection_api.
|
||||
Unfortunately, RPC requires context as the first
|
||||
argument, even though we don't use it.
|
||||
A context should be set when instantiating the
|
||||
object, e.g.: Chassis(context)
|
||||
"""
|
||||
self.dbapi.destroy_chassis(self.uuid)
|
||||
self.obj_reset_changes()
|
||||
|
||||
@base.remotable
|
||||
def save(self, context=None):
|
||||
"""Save updates to this Chassis.
|
||||
|
||||
Updates will be made column by column based on the result
|
||||
of self.what_changed().
|
||||
|
||||
:param context: Security context. NOTE: This should only
|
||||
be used internally by the indirection_api.
|
||||
Unfortunately, RPC requires context as the first
|
||||
argument, even though we don't use it.
|
||||
A context should be set when instantiating the
|
||||
object, e.g.: Chassis(context)
|
||||
"""
|
||||
updates = self.obj_get_changes()
|
||||
self.dbapi.update_chassis(self.uuid, updates)
|
||||
|
||||
self.obj_reset_changes()
|
||||
|
||||
@base.remotable
|
||||
def refresh(self, context=None):
|
||||
"""Loads and applies updates for this Chassis.
|
||||
|
||||
Loads a :class:`Chassis` with the same uuid from the database and
|
||||
checks for updated attributes. Updates are applied from
|
||||
the loaded chassis column by column, if there are any updates.
|
||||
|
||||
:param context: Security context. NOTE: This should only
|
||||
be used internally by the indirection_api.
|
||||
Unfortunately, RPC requires context as the first
|
||||
argument, even though we don't use it.
|
||||
A context should be set when instantiating the
|
||||
object, e.g.: Chassis(context)
|
||||
"""
|
||||
current = self.__class__.get_by_uuid(self._context, uuid=self.uuid)
|
||||
for field in self.fields:
|
||||
if (hasattr(self, base.get_attrname(field)) and
|
||||
self[field] != current[field]):
|
||||
self[field] = current[field]
|
@ -1,272 +0,0 @@
|
||||
# coding=utf-8
|
||||
#
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_utils import strutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from iotronic.common import exception
|
||||
from iotronic.db import api as db_api
|
||||
from iotronic.objects import base
|
||||
from iotronic.objects import utils as obj_utils
|
||||
|
||||
|
||||
class Node(base.IotronicObject):
|
||||
# Version 1.0: Initial version
|
||||
VERSION = '1.0'
|
||||
|
||||
dbapi = db_api.get_instance()
|
||||
|
||||
fields = {
|
||||
'id': int,
|
||||
|
||||
'uuid': obj_utils.str_or_none,
|
||||
'name': obj_utils.str_or_none,
|
||||
'status': obj_utils.str_or_none,
|
||||
#'chassis_id': obj_utils.int_or_none,
|
||||
#'instance_uuid': obj_utils.str_or_none,
|
||||
|
||||
#'driver': obj_utils.str_or_none,
|
||||
#'driver_info': obj_utils.dict_or_none,
|
||||
#'driver_internal_info': obj_utils.dict_or_none,
|
||||
|
||||
# A clean step dictionary, indicating the current clean step
|
||||
# being executed, or None, indicating cleaning is not in progress
|
||||
# or has not yet started.
|
||||
#'clean_step': obj_utils.dict_or_none,
|
||||
|
||||
#'instance_info': obj_utils.dict_or_none,
|
||||
#'properties': obj_utils.dict_or_none,
|
||||
'reservation': obj_utils.str_or_none,
|
||||
# a reference to the id of the conductor service, not its hostname,
|
||||
# that has most recently performed some action which could require
|
||||
# local state to be maintained (eg, built a PXE config)
|
||||
#'conductor_affinity': obj_utils.int_or_none,
|
||||
|
||||
# One of states.POWER_ON|POWER_OFF|NOSTATE|ERROR
|
||||
#'power_state': obj_utils.str_or_none,
|
||||
|
||||
# Set to one of states.POWER_ON|POWER_OFF when a power operation
|
||||
# starts, and set to NOSTATE when the operation finishes
|
||||
# (successfully or unsuccessfully).
|
||||
#'target_power_state': obj_utils.str_or_none,
|
||||
|
||||
#'provision_state': obj_utils.str_or_none,
|
||||
#'provision_updated_at': obj_utils.datetime_or_str_or_none,
|
||||
#'target_provision_state': obj_utils.str_or_none,
|
||||
|
||||
#'maintenance': bool,
|
||||
#'maintenance_reason': obj_utils.str_or_none,
|
||||
#'console_enabled': bool,
|
||||
|
||||
# Any error from the most recent (last) asynchronous transaction
|
||||
# that started but failed to finish.
|
||||
#'last_error': obj_utils.str_or_none,
|
||||
|
||||
#'inspection_finished_at': obj_utils.datetime_or_str_or_none,
|
||||
#'inspection_started_at': obj_utils.datetime_or_str_or_none,
|
||||
|
||||
#'extra': obj_utils.dict_or_none,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _from_db_object(node, db_node):
|
||||
"""Converts a database entity to a formal object."""
|
||||
for field in node.fields:
|
||||
node[field] = db_node[field]
|
||||
node.obj_reset_changes()
|
||||
return node
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get(cls, context, node_id):
|
||||
"""Find a node based on its id or uuid and return a Node object.
|
||||
|
||||
:param node_id: the id *or* uuid of a node.
|
||||
:returns: a :class:`Node` object.
|
||||
"""
|
||||
if strutils.is_int_like(node_id):
|
||||
return cls.get_by_id(context, node_id)
|
||||
elif uuidutils.is_uuid_like(node_id):
|
||||
return cls.get_by_uuid(context, node_id)
|
||||
else:
|
||||
raise exception.InvalidIdentity(identity=node_id)
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get_by_id(cls, context, node_id):
|
||||
"""Find a node based on its integer id and return a Node object.
|
||||
|
||||
:param node_id: the id of a node.
|
||||
:returns: a :class:`Node` object.
|
||||
"""
|
||||
db_node = cls.dbapi.get_node_by_id(node_id)
|
||||
node = Node._from_db_object(cls(context), db_node)
|
||||
return node
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get_by_uuid(cls, context, uuid):
|
||||
"""Find a node based on uuid and return a Node object.
|
||||
|
||||
:param uuid: the uuid of a node.
|
||||
:returns: a :class:`Node` object.
|
||||
"""
|
||||
db_node = cls.dbapi.get_node_by_uuid(uuid)
|
||||
node = Node._from_db_object(cls(context), db_node)
|
||||
return node
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get_by_name(cls, context, name):
|
||||
"""Find a node based on name and return a Node object.
|
||||
|
||||
:param name: the logical name of a node.
|
||||
:returns: a :class:`Node` object.
|
||||
"""
|
||||
db_node = cls.dbapi.get_node_by_name(name)
|
||||
node = Node._from_db_object(cls(context), db_node)
|
||||
return node
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get_by_instance_uuid(cls, context, instance_uuid):
|
||||
"""Find a node based on the instance uuid and return a Node object.
|
||||
|
||||
:param uuid: the uuid of the instance.
|
||||
:returns: a :class:`Node` object.
|
||||
"""
|
||||
db_node = cls.dbapi.get_node_by_instance(instance_uuid)
|
||||
node = Node._from_db_object(cls(context), db_node)
|
||||
return node
|
||||
|
||||
@base.remotable_classmethod
|
||||
def list(cls, context, limit=None, marker=None, sort_key=None,
|
||||
sort_dir=None, filters=None):
|
||||
"""Return a list of Node objects.
|
||||
|
||||
:param context: Security context.
|
||||
:param limit: maximum number of resources to return in a single result.
|
||||
:param marker: pagination marker for large data sets.
|
||||
:param sort_key: column to sort results by.
|
||||
:param sort_dir: direction to sort. "asc" or "desc".
|
||||
:param filters: Filters to apply.
|
||||
:returns: a list of :class:`Node` object.
|
||||
|
||||
"""
|
||||
db_nodes = cls.dbapi.get_node_list(filters=filters, limit=limit,
|
||||
marker=marker, sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
return [Node._from_db_object(cls(context), obj) for obj in db_nodes]
|
||||
|
||||
@base.remotable_classmethod
|
||||
def reserve(cls, context, tag, node_id):
|
||||
"""Get and reserve a node.
|
||||
|
||||
To prevent other ManagerServices from manipulating the given
|
||||
Node while a Task is performed, mark it reserved by this host.
|
||||
|
||||
:param context: Security context.
|
||||
:param tag: A string uniquely identifying the reservation holder.
|
||||
:param node_id: A node id or uuid.
|
||||
:raises: NodeNotFound if the node is not found.
|
||||
:returns: a :class:`Node` object.
|
||||
|
||||
"""
|
||||
db_node = cls.dbapi.reserve_node(tag, node_id)
|
||||
node = Node._from_db_object(cls(context), db_node)
|
||||
return node
|
||||
|
||||
@base.remotable_classmethod
|
||||
def release(cls, context, tag, node_id):
|
||||
"""Release the reservation on a node.
|
||||
|
||||
:param context: Security context.
|
||||
:param tag: A string uniquely identifying the reservation holder.
|
||||
:param node_id: A node id or uuid.
|
||||
:raises: NodeNotFound if the node is not found.
|
||||
|
||||
"""
|
||||
cls.dbapi.release_node(tag, node_id)
|
||||
|
||||
@base.remotable
|
||||
def create(self, context=None):
|
||||
"""Create a Node record in the DB.
|
||||
|
||||
Column-wise updates will be made based on the result of
|
||||
self.what_changed(). If target_power_state is provided,
|
||||
it will be checked against the in-database copy of the
|
||||
node before updates are made.
|
||||
|
||||
:param context: Security context. NOTE: This should only
|
||||
be used internally by the indirection_api.
|
||||
Unfortunately, RPC requires context as the first
|
||||
argument, even though we don't use it.
|
||||
A context should be set when instantiating the
|
||||
object, e.g.: Node(context)
|
||||
|
||||
"""
|
||||
values = self.obj_get_changes()
|
||||
db_node = self.dbapi.create_node(values)
|
||||
self._from_db_object(self, db_node)
|
||||
|
||||
@base.remotable
|
||||
def destroy(self, context=None):
|
||||
"""Delete the Node from the DB.
|
||||
|
||||
:param context: Security context. NOTE: This should only
|
||||
be used internally by the indirection_api.
|
||||
Unfortunately, RPC requires context as the first
|
||||
argument, even though we don't use it.
|
||||
A context should be set when instantiating the
|
||||
object, e.g.: Node(context)
|
||||
"""
|
||||
self.dbapi.destroy_node(self.uuid)
|
||||
self.obj_reset_changes()
|
||||
|
||||
@base.remotable
|
||||
def save(self, context=None):
|
||||
"""Save updates to this Node.
|
||||
|
||||
Column-wise updates will be made based on the result of
|
||||
self.what_changed(). If target_power_state is provided,
|
||||
it will be checked against the in-database copy of the
|
||||
node before updates are made.
|
||||
|
||||
:param context: Security context. NOTE: This should only
|
||||
be used internally by the indirection_api.
|
||||
Unfortunately, RPC requires context as the first
|
||||
argument, even though we don't use it.
|
||||
A context should be set when instantiating the
|
||||
object, e.g.: Node(context)
|
||||
"""
|
||||
updates = self.obj_get_changes()
|
||||
if 'driver' in updates and 'driver_internal_info' not in updates:
|
||||
# Clean driver_internal_info when changes driver
|
||||
self.driver_internal_info = {}
|
||||
updates = self.obj_get_changes()
|
||||
self.dbapi.update_node(self.uuid, updates)
|
||||
self.obj_reset_changes()
|
||||
|
||||
@base.remotable
|
||||
def refresh(self, context=None):
|
||||
"""Refresh the object by re-fetching from the DB.
|
||||
|
||||
:param context: Security context. NOTE: This should only
|
||||
be used internally by the indirection_api.
|
||||
Unfortunately, RPC requires context as the first
|
||||
argument, even though we don't use it.
|
||||
A context should be set when instantiating the
|
||||
object, e.g.: Node(context)
|
||||
"""
|
||||
current = self.__class__.get_by_uuid(self._context, self.uuid)
|
||||
for field in self.fields:
|
||||
if (hasattr(self, base.get_attrname(field)) and
|
||||
self[field] != current[field]):
|
||||
self[field] = current[field]
|
@ -1,217 +0,0 @@
|
||||
# coding=utf-8
|
||||
#
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_utils import strutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from iotronic.common import exception
|
||||
from iotronic.common import utils
|
||||
from iotronic.db import api as dbapi
|
||||
from iotronic.objects import base
|
||||
from iotronic.objects import utils as obj_utils
|
||||
|
||||
|
||||
class Port(base.IotronicObject):
|
||||
# Version 1.0: Initial version
|
||||
# Version 1.1: Add get() and get_by_id() and get_by_address() and
|
||||
# make get_by_uuid() only work with a uuid
|
||||
# Version 1.2: Add create() and destroy()
|
||||
# Version 1.3: Add list()
|
||||
# Version 1.4: Add list_by_node_id()
|
||||
VERSION = '1.4'
|
||||
|
||||
dbapi = dbapi.get_instance()
|
||||
|
||||
fields = {
|
||||
'id': int,
|
||||
'uuid': obj_utils.str_or_none,
|
||||
'node_id': obj_utils.int_or_none,
|
||||
'address': obj_utils.str_or_none,
|
||||
'extra': obj_utils.dict_or_none,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _from_db_object(port, db_port):
|
||||
"""Converts a database entity to a formal object."""
|
||||
for field in port.fields:
|
||||
port[field] = db_port[field]
|
||||
|
||||
port.obj_reset_changes()
|
||||
return port
|
||||
|
||||
@staticmethod
|
||||
def _from_db_object_list(db_objects, cls, context):
|
||||
"""Converts a list of database entities to a list of formal objects."""
|
||||
return [Port._from_db_object(cls(context), obj) for obj in db_objects]
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get(cls, context, port_id):
|
||||
"""Find a port based on its id or uuid and return a Port object.
|
||||
|
||||
:param port_id: the id *or* uuid of a port.
|
||||
:returns: a :class:`Port` object.
|
||||
"""
|
||||
if strutils.is_int_like(port_id):
|
||||
return cls.get_by_id(context, port_id)
|
||||
elif uuidutils.is_uuid_like(port_id):
|
||||
return cls.get_by_uuid(context, port_id)
|
||||
elif utils.is_valid_mac(port_id):
|
||||
return cls.get_by_address(context, port_id)
|
||||
else:
|
||||
raise exception.InvalidIdentity(identity=port_id)
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get_by_id(cls, context, port_id):
|
||||
"""Find a port based on its integer id and return a Port object.
|
||||
|
||||
:param port_id: the id of a port.
|
||||
:returns: a :class:`Port` object.
|
||||
"""
|
||||
db_port = cls.dbapi.get_port_by_id(port_id)
|
||||
port = Port._from_db_object(cls(context), db_port)
|
||||
return port
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get_by_uuid(cls, context, uuid):
|
||||
"""Find a port based on uuid and return a :class:`Port` object.
|
||||
|
||||
:param uuid: the uuid of a port.
|
||||
:param context: Security context
|
||||
:returns: a :class:`Port` object.
|
||||
"""
|
||||
db_port = cls.dbapi.get_port_by_uuid(uuid)
|
||||
port = Port._from_db_object(cls(context), db_port)
|
||||
return port
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get_by_address(cls, context, address):
|
||||
"""Find a port based on address and return a :class:`Port` object.
|
||||
|
||||
:param address: the address of a port.
|
||||
:param context: Security context
|
||||
:returns: a :class:`Port` object.
|
||||
"""
|
||||
db_port = cls.dbapi.get_port_by_address(address)
|
||||
port = Port._from_db_object(cls(context), db_port)
|
||||
return port
|
||||
|
||||
@base.remotable_classmethod
|
||||
def list(cls, context, limit=None, marker=None,
|
||||
sort_key=None, sort_dir=None):
|
||||
"""Return a list of Port objects.
|
||||
|
||||
:param context: Security context.
|
||||
:param limit: maximum number of resources to return in a single result.
|
||||
:param marker: pagination marker for large data sets.
|
||||
:param sort_key: column to sort results by.
|
||||
:param sort_dir: direction to sort. "asc" or "desc".
|
||||
:returns: a list of :class:`Port` object.
|
||||
|
||||
"""
|
||||
db_ports = cls.dbapi.get_port_list(limit=limit,
|
||||
marker=marker,
|
||||
sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
return Port._from_db_object_list(db_ports, cls, context)
|
||||
|
||||
@base.remotable_classmethod
|
||||
def list_by_node_id(cls, context, node_id, limit=None, marker=None,
|
||||
sort_key=None, sort_dir=None):
|
||||
"""Return a list of Port objects associated with a given node ID.
|
||||
|
||||
:param context: Security context.
|
||||
:param node_id: the ID of the node.
|
||||
:param limit: maximum number of resources to return in a single result.
|
||||
:param marker: pagination marker for large data sets.
|
||||
:param sort_key: column to sort results by.
|
||||
:param sort_dir: direction to sort. "asc" or "desc".
|
||||
:returns: a list of :class:`Port` object.
|
||||
|
||||
"""
|
||||
db_ports = cls.dbapi.get_ports_by_node_id(node_id, limit=limit,
|
||||
marker=marker,
|
||||
sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
return Port._from_db_object_list(db_ports, cls, context)
|
||||
|
||||
@base.remotable
|
||||
def create(self, context=None):
|
||||
"""Create a Port record in the DB.
|
||||
|
||||
:param context: Security context. NOTE: This should only
|
||||
be used internally by the indirection_api.
|
||||
Unfortunately, RPC requires context as the first
|
||||
argument, even though we don't use it.
|
||||
A context should be set when instantiating the
|
||||
object, e.g.: Port(context)
|
||||
|
||||
"""
|
||||
values = self.obj_get_changes()
|
||||
db_port = self.dbapi.create_port(values)
|
||||
self._from_db_object(self, db_port)
|
||||
|
||||
@base.remotable
|
||||
def destroy(self, context=None):
|
||||
"""Delete the Port from the DB.
|
||||
|
||||
:param context: Security context. NOTE: This should only
|
||||
be used internally by the indirection_api.
|
||||
Unfortunately, RPC requires context as the first
|
||||
argument, even though we don't use it.
|
||||
A context should be set when instantiating the
|
||||
object, e.g.: Port(context)
|
||||
"""
|
||||
self.dbapi.destroy_port(self.uuid)
|
||||
self.obj_reset_changes()
|
||||
|
||||
@base.remotable
|
||||
def save(self, context=None):
|
||||
"""Save updates to this Port.
|
||||
|
||||
Updates will be made column by column based on the result
|
||||
of self.what_changed().
|
||||
|
||||
:param context: Security context. NOTE: This should only
|
||||
be used internally by the indirection_api.
|
||||
Unfortunately, RPC requires context as the first
|
||||
argument, even though we don't use it.
|
||||
A context should be set when instantiating the
|
||||
object, e.g.: Port(context)
|
||||
"""
|
||||
updates = self.obj_get_changes()
|
||||
self.dbapi.update_port(self.uuid, updates)
|
||||
|
||||
self.obj_reset_changes()
|
||||
|
||||
@base.remotable
|
||||
def refresh(self, context=None):
|
||||
"""Loads updates for this Port.
|
||||
|
||||
Loads a port with the same uuid from the database and
|
||||
checks for updated attributes. Updates are applied from
|
||||
the loaded port column by column, if there are any updates.
|
||||
|
||||
:param context: Security context. NOTE: This should only
|
||||
be used internally by the indirection_api.
|
||||
Unfortunately, RPC requires context as the first
|
||||
argument, even though we don't use it.
|
||||
A context should be set when instantiating the
|
||||
object, e.g.: Port(context)
|
||||
"""
|
||||
current = self.__class__.get_by_uuid(self._context, uuid=self.uuid)
|
||||
for field in self.fields:
|
||||
if (hasattr(self, base.get_attrname(field)) and
|
||||
self[field] != current[field]):
|
||||
self[field] = current[field]
|
@ -120,6 +120,7 @@ def remotable_classmethod(fn):
|
||||
# "orphaned" and remotable methods cannot be called.
|
||||
def remotable(fn):
|
||||
"""Decorator for remotable object methods."""
|
||||
|
||||
def wrapper(self, *args, **kwargs):
|
||||
ctxt = self._context
|
||||
try:
|
||||
@ -514,7 +515,7 @@ class ObjectListBase(object):
|
||||
objects = []
|
||||
for entity in value:
|
||||
obj = IotronicObject.obj_from_primitive(entity,
|
||||
context=self._context)
|
||||
context=self._context)
|
||||
objects.append(obj)
|
||||
return objects
|
||||
|
||||
@ -539,9 +540,10 @@ class IotronicObjectSerializer(messaging.NoOpSerializer):
"""A IotronicObject-aware Serializer.

This implements the Oslo Serializer interface and provides the
ability to serialize and deserialize IotronicObject entities. Any service
that needs to accept or return IotronicObjects as arguments or result values
should pass this to its RpcProxy and RpcDispatcher objects.
ability to serialize and deserialize IotronicObject entities.
Any service that needs to accept or return IotronicObjects as
arguments or result values should pass this to its RpcProxy
and RpcDispatcher objects.
"""
|
||||
|
||||
def _process_iterable(self, context, action_fn, values):
|
||||
@ -582,8 +584,8 @@ class IotronicObjectSerializer(messaging.NoOpSerializer):
|
||||
def obj_to_primitive(obj):
|
||||
"""Recursively turn an object into a python primitive.
|
||||
|
||||
An IotronicObject becomes a dict, and anything that implements ObjectListBase
|
||||
becomes a list.
|
||||
An IotronicObject becomes a dict, and anything that implements
|
||||
ObjectListBase becomes a list.
|
||||
"""
|
||||
if isinstance(obj, ObjectListBase):
|
||||
return [obj_to_primitive(x) for x in obj]
|
||||
|
@ -17,7 +17,6 @@ from oslo_utils import strutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from iotronic.common import exception
|
||||
from iotronic.common import utils
|
||||
from iotronic.db import api as dbapi
|
||||
from iotronic.objects import base
|
||||
from iotronic.objects import utils as obj_utils
|
||||
@ -48,7 +47,10 @@ class Location(base.IotronicObject):
|
||||
@staticmethod
|
||||
def _from_db_object_list(db_objects, cls, context):
|
||||
"""Converts a list of database entities to a list of formal objects."""
|
||||
return [Location._from_db_object(cls(context), obj) for obj in db_objects]
|
||||
return [
|
||||
Location._from_db_object(
|
||||
cls(context),
|
||||
obj) for obj in db_objects]
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get(cls, context, location_id):
|
||||
@ -101,9 +103,9 @@ class Location(base.IotronicObject):
|
||||
|
||||
"""
|
||||
db_locations = cls.dbapi.get_location_list(limit=limit,
|
||||
marker=marker,
|
||||
sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
marker=marker,
|
||||
sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
return Location._from_db_object_list(db_locations, cls, context)
|
||||
|
||||
@base.remotable_classmethod
|
||||
@ -121,13 +123,13 @@ class Location(base.IotronicObject):
|
||||
|
||||
"""
|
||||
db_locations = cls.dbapi.get_locations_by_node_id(node_id, limit=limit,
|
||||
marker=marker,
|
||||
sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
marker=marker,
|
||||
sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
return Location._from_db_object_list(db_locations, cls, context)
|
||||
|
||||
@base.remotable
|
||||
def create(self,context=None):
|
||||
def create(self, context=None):
|
||||
"""Create a Location record in the DB.
|
||||
|
||||
:param context: Security context. NOTE: This should only
|
||||
|
@ -104,8 +104,8 @@ class Node(base.IotronicObject):
|
||||
"""
|
||||
db_node = cls.dbapi.get_node_by_name(name)
|
||||
node = Node._from_db_object(cls(context), db_node)
|
||||
return node
|
||||
|
||||
return node
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get_by_instance_uuid(cls, context, instance_uuid):
|
||||
"""Find a node based on the instance uuid and return a Node object.
|
||||
|
@ -17,7 +17,6 @@ from oslo_utils import strutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from iotronic.common import exception
|
||||
from iotronic.common import utils
|
||||
from iotronic.db import api as dbapi
|
||||
from iotronic.objects import base
|
||||
from iotronic.objects import utils as obj_utils
|
||||
@ -48,7 +47,10 @@ class SessionWP(base.IotronicObject):
|
||||
@staticmethod
|
||||
def _from_db_object_list(db_objects, cls, context):
|
||||
"""Converts a list of database entities to a list of formal objects."""
|
||||
return [SessionWP._from_db_object(cls(context), obj) for obj in db_objects]
|
||||
return [
|
||||
SessionWP._from_db_object(
|
||||
cls(context),
|
||||
obj) for obj in db_objects]
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get(cls, context, session_id):
|
||||
@ -74,7 +76,7 @@ class SessionWP(base.IotronicObject):
|
||||
db_session = cls.dbapi.get_session_by_id(ses_id)
|
||||
session = SessionWP._from_db_object(cls(context), db_session)
|
||||
return session
|
||||
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get_by_session_id(cls, context, session_id):
|
||||
"""Find a session based on its integer id and return a SessionWP object.
|
||||
@ -87,17 +89,17 @@ class SessionWP(base.IotronicObject):
|
||||
return session
|
||||
|
||||
@base.remotable_classmethod
|
||||
def get_session_by_node_uuid(cls,node_uuid,valid=True, context=None ):
|
||||
def get_session_by_node_uuid(cls, node_uuid, valid=True, context=None):
|
||||
"""Find a session based on uuid and return a :class:`SessionWP` object.
|
||||
|
||||
:param node_uuid: the uuid of a node.
|
||||
:param context: Security context
|
||||
:returns: a :class:`SessionWP` object.
|
||||
"""
|
||||
db_session = cls.dbapi.get_session_by_node_uuid(node_uuid,valid)
|
||||
db_session = cls.dbapi.get_session_by_node_uuid(node_uuid, valid)
|
||||
session = SessionWP._from_db_object(cls(context), db_session)
|
||||
return session
|
||||
|
||||
return session
|
||||
|
||||
@base.remotable_classmethod
|
||||
def list(cls, context, limit=None, marker=None,
|
||||
sort_key=None, sort_dir=None):
|
||||
@ -112,9 +114,9 @@ class SessionWP(base.IotronicObject):
|
||||
|
||||
"""
|
||||
db_sessions = cls.dbapi.get_session_list(limit=limit,
|
||||
marker=marker,
|
||||
sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
marker=marker,
|
||||
sort_key=sort_key,
|
||||
sort_dir=sort_dir)
|
||||
return SessionWP._from_db_object_list(db_sessions, cls, context)
|
||||
|
||||
'''
|
||||
@ -138,9 +140,9 @@ class SessionWP(base.IotronicObject):
|
||||
sort_dir=sort_dir)
|
||||
return SessionWP._from_db_object_list(db_sessions, cls, context)
|
||||
'''
|
||||
|
||||
|
||||
@base.remotable
|
||||
def create(self,context=None):
|
||||
def create(self, context=None):
|
||||
"""Create a SessionWP record in the DB.
|
||||
|
||||
:param context: Security context. NOTE: This should only
|
||||
|
@ -109,6 +109,7 @@ def nested_object_or_none(objclass):
|
||||
|
||||
def dt_serializer(name):
|
||||
"""Return a datetime serializer for a named attribute."""
|
||||
|
||||
def serializer(self, name=name):
|
||||
if getattr(self, name) is not None:
|
||||
return timeutils.isotime(getattr(self, name))
|
||||
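The closure trick above (binding the attribute name at definition time) can be exercised on its own; a minimal sketch, using datetime.isoformat() in place of timeutils.isotime():

    import datetime

    def dt_serializer(name):
        """Return a datetime serializer bound to a named attribute."""
        def serializer(self, name=name):
            value = getattr(self, name)
            return value.isoformat() if value is not None else None
        return serializer

    class Record(object):
        created_at = datetime.datetime(2015, 7, 1, 12, 0, 0)
        # the returned function becomes an ordinary method of the class
        serialize_created_at = dt_serializer('created_at')

    print(Record().serialize_created_at())  # '2015-07-01T12:00:00'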
|
@ -56,6 +56,7 @@ def list_opts():
|
||||
|
||||
|
||||
class EventletBackdoorConfigValueError(Exception):
|
||||
|
||||
def __init__(self, port_range, help_msg, ex):
|
||||
msg = ('Invalid backdoor_port configuration %(range)s: %(ex)s. '
|
||||
'%(help)s' %
|
||||
@ -108,7 +109,7 @@ def _listen(host, start_port, end_port, listen_func):
|
||||
return listen_func((host, try_port))
|
||||
except socket.error as exc:
|
||||
if (exc.errno != errno.EADDRINUSE or
|
||||
try_port >= end_port):
|
||||
try_port >= end_port):
|
||||
raise
|
||||
try_port += 1
|
||||
|
||||
|
@ -29,6 +29,7 @@ LOG = logging.getLogger(__name__)
|
||||
# NOTE(zyluo): This lambda function was declared to avoid mocking collisions
|
||||
# with time.time() called in the standard logging module
|
||||
# during unittests.
|
||||
|
||||
_ts = lambda: time.time()
|
||||
|
||||
|
||||
@ -50,6 +51,7 @@ class LoopingCallDone(Exception):
|
||||
|
||||
|
||||
class LoopingCallBase(object):
|
||||
|
||||
def __init__(self, f=None, *args, **kw):
|
||||
self.args = args
|
||||
self.kw = kw
|
||||
|
@ -107,6 +107,7 @@ def periodic_task(*args, **kwargs):
|
||||
|
||||
|
||||
class _PeriodicTasksMeta(type):
|
||||
|
||||
def _add_periodic_task(cls, task):
|
||||
"""Add a periodic task to the list of periodic tasks.
|
||||
|
||||
@ -183,6 +184,7 @@ def _nearest_boundary(last_run, spacing):
|
||||
|
||||
@six.add_metaclass(_PeriodicTasksMeta)
|
||||
class PeriodicTasks(object):
|
||||
|
||||
def __init__(self):
|
||||
super(PeriodicTasks, self).__init__()
|
||||
self._periodic_last_run = {}
|
||||
|
@ -143,12 +143,14 @@ class Launcher(object):
|
||||
|
||||
|
||||
class SignalExit(SystemExit):
|
||||
|
||||
def __init__(self, signo, exccode=1):
|
||||
super(SignalExit, self).__init__(exccode)
|
||||
self.signo = signo
|
||||
|
||||
|
||||
class ServiceLauncher(Launcher):
|
||||
|
||||
def _handle_signal(self, signo, frame):
|
||||
# Allow the process to be killed again and die from natural causes
|
||||
_set_signals_handler(signal.SIG_DFL)
|
||||
@ -191,6 +193,7 @@ class ServiceLauncher(Launcher):
|
||||
|
||||
|
||||
class ServiceWrapper(object):
|
||||
|
||||
def __init__(self, service, workers):
|
||||
self.service = service
|
||||
self.workers = workers
|
||||
|
@ -36,6 +36,7 @@ class Thread(object):
|
||||
:class:`ThreadGroup`. The Thread will notify the :class:`ThreadGroup` when
|
||||
it has done so it can be removed from the threads list.
|
||||
"""
|
||||
|
||||
def __init__(self, thread, group):
|
||||
self.thread = thread
|
||||
self.thread.link(_thread_done, group=group, thread=self)
|
||||
@ -57,6 +58,7 @@ class ThreadGroup(object):
|
||||
when need be).
|
||||
* provide an easy API to add timers.
|
||||
"""
|
||||
|
||||
def __init__(self, thread_pool_size=10):
|
||||
self.pool = greenpool.GreenPool(thread_pool_size)
|
||||
self.threads = []
|
||||
|
0
iotronic/tests/__init__.py
Normal file
23
iotronic/tests/base.py
Normal file
@ -0,0 +1,23 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2010-2011 OpenStack Foundation
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslotest import base
|
||||
|
||||
|
||||
class TestCase(base.BaseTestCase):
|
||||
|
||||
"""Test case base class for all unit tests."""
|
28
iotronic/tests/test_iotronic.py
Normal file
@ -0,0 +1,28 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
test_iotronic
|
||||
----------------------------------
|
||||
|
||||
Tests for `iotronic` module.
|
||||
"""
|
||||
|
||||
from iotronic.tests import base
|
||||
|
||||
|
||||
class TestIotronic(base.TestCase):
|
||||
|
||||
def test_something(self):
|
||||
pass
|
@ -1,82 +1,108 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from autobahn.twisted.util import sleep
|
||||
from autobahn.twisted.wamp import ApplicationRunner
|
||||
from autobahn.twisted.wamp import ApplicationSession
|
||||
from twisted.internet.defer import inlineCallbacks
|
||||
import multiprocessing
|
||||
from autobahn.twisted.util import sleep
|
||||
from twisted.internet.defer import inlineCallbacks
|
||||
|
||||
|
||||
msg_queue = None
|
||||
|
||||
msg_queue=None
|
||||
|
||||
|
||||
class Publisher(ApplicationSession):
|
||||
|
||||
def onJoin(self, details):
|
||||
print("Publisher session ready")
|
||||
|
||||
|
||||
|
||||
class Subscriber(ApplicationSession):
|
||||
|
||||
@inlineCallbacks
|
||||
def onJoin(self, details):
|
||||
print("Subscriber session ready")
|
||||
self.topic_reader = self.config.extra['topic']
|
||||
|
||||
def manage_msg(*args):
|
||||
print args
|
||||
print (args)
|
||||
|
||||
try:
|
||||
yield self.subscribe(manage_msg, self.topic_reader)
|
||||
print("subscribed to topic")
|
||||
except Exception as e:
|
||||
print("could not subscribe to topic: {0}".format(e))
|
||||
|
||||
|
||||
global msg_queue
|
||||
while True:
|
||||
if not msg_queue.empty():
|
||||
msg=msg_queue.get()
|
||||
msg = msg_queue.get()
|
||||
self.publish(msg['topic'], msg['message'])
|
||||
yield sleep(0.01)
|
||||
|
||||
class PublisherClient:
|
||||
def __init__(self,ip,port,realm):
|
||||
self.ip=unicode(ip)
|
||||
self.port=unicode(port)
|
||||
self.realm=unicode(realm)
|
||||
self._url = "ws://"+self.ip+":"+self.port+"/ws"
|
||||
self.runner = ApplicationRunner(url=unicode(self._url), realm=self.realm,
|
||||
#debug=True, debug_wamp=True, debug_app=True
|
||||
)
|
||||
|
||||
|
||||
class PublisherClient(object):
|
||||
|
||||
def __init__(self, ip, port, realm):
|
||||
self.ip = unicode(ip)
|
||||
self.port = unicode(port)
|
||||
self.realm = unicode(realm)
|
||||
self._url = "ws://" + self.ip + ":" + self.port + "/ws"
|
||||
self.runner = ApplicationRunner(
|
||||
url=unicode(self._url),
|
||||
realm=self.realm,
|
||||
# debug=True, debug_wamp=True,
|
||||
# debug_app=True
|
||||
)
|
||||
|
||||
def start(self):
|
||||
# Pass start_reactor=False to all runner.run() calls
|
||||
self.runner.run(Publisher, start_reactor=False)
|
||||
|
||||
class SubscriberClient:
|
||||
def __init__(self,ip,port,realm,topic):
|
||||
self.ip=unicode(ip)
|
||||
self.port=unicode(port)
|
||||
self.realm=unicode(realm)
|
||||
self.topic=unicode(topic)
|
||||
self._url = "ws://"+self.ip+":"+self.port+"/ws"
|
||||
self.runner = ApplicationRunner(url=unicode(self._url), realm=self.realm, extra={'topic':self.topic}
|
||||
#debug=True, debug_wamp=True, debug_app=True
|
||||
)
|
||||
|
||||
|
||||
|
||||
class SubscriberClient(object):
|
||||
|
||||
def __init__(self, ip, port, realm, topic):
|
||||
self.ip = unicode(ip)
|
||||
self.port = unicode(port)
|
||||
self.realm = unicode(realm)
|
||||
self.topic = unicode(topic)
|
||||
self._url = "ws://" + self.ip + ":" + self.port + "/ws"
|
||||
self.runner = ApplicationRunner(
|
||||
url=unicode(self._url),
|
||||
realm=self.realm,
|
||||
# debug=True, debug_wamp=True,
|
||||
# debug_app=True
|
||||
)
|
||||
|
||||
def start(self):
|
||||
# Pass start_reactor=False to all runner.run() calls
|
||||
self.runner.run(Subscriber, start_reactor=False)
|
||||
self.runner.run(Subscriber, start_reactor=False)
|
||||
|
||||
class ClientWamp:
|
||||
|
||||
def __init__(self,ip,port,realm,topic='board.connection'):
|
||||
server = SubscriberClient(ip,port,realm,topic)
|
||||
sendMessage = PublisherClient(ip,port,realm)
|
||||
|
||||
class ClientWamp(object):
|
||||
|
||||
def __init__(self, ip, port, realm, topic='board.connection'):
|
||||
server = SubscriberClient(ip, port, realm, topic)
|
||||
sendMessage = PublisherClient(ip, port, realm)
|
||||
server.start()
|
||||
sendMessage.start()
|
||||
|
||||
|
||||
from twisted.internet import reactor
|
||||
global msg_queue
|
||||
msg_queue = multiprocessing.Queue()
|
||||
multi = multiprocessing.Process(target=reactor.run, args=())
|
||||
multi.start()
|
||||
|
||||
def send(self,topic,msg):
|
||||
full_msg={'topic':unicode(topic),'message':unicode(msg)}
|
||||
msg_queue.put(full_msg)
|
||||
|
||||
def send(self, topic, msg):
|
||||
full_msg = {'topic': unicode(topic), 'message': unicode(msg)}
|
||||
msg_queue.put(full_msg)
|
||||
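The refactored classes keep the original call pattern, so client code like the old chat test (removed further below) should still work unchanged; roughly:

    from iotronic.wamp.clientwamp import ClientWamp

    c = ClientWamp('localhost', '8181', 's4t')  # starts publisher + subscriber, spawns the reactor process
    c.send('board.connection', 'Hello from the chat wamp!')  # queued, then published from the Subscriber session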
|
@ -1,19 +1,30 @@
|
||||
from iotronic import objects
|
||||
from oslo_utils import uuidutils
|
||||
import pecan
|
||||
from oslo_log import log
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from iotronic.common import exception
|
||||
from iotronic import objects
|
||||
from oslo_log import log
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
def leave_function(session_id):
|
||||
LOG.debug('Node with %s disconnectd',session_id)
|
||||
LOG.debug('Node with %s disconnected', session_id)
|
||||
try:
|
||||
old_session=objects.SessionWP({}).get_by_session_id({},session_id)
|
||||
old_session.valid=False
|
||||
old_session = objects.SessionWP({}).get_by_session_id({}, session_id)
|
||||
old_session.valid = False
|
||||
old_session.save()
|
||||
LOG.debug('Session %s deleted', session_id)
|
||||
except:
|
||||
except Exception:
|
||||
LOG.debug('Error in deleting session %s', session_id)
|
||||
|
||||
|
||||
@ -21,25 +32,31 @@ def test():
|
||||
LOG.debug('hello')
|
||||
return u'hello!'
|
||||
|
||||
def registration(code_node,session_num):
|
||||
LOG.debug('Receved registration from %s with session %s',code_node, session_num)
|
||||
response=''
|
||||
|
||||
def registration(code_node, session_num):
|
||||
LOG.debug(
|
||||
'Received registration from %s with session %s',
|
||||
code_node,
|
||||
session_num)
|
||||
response = ''
|
||||
try:
|
||||
node = objects.Node.get_by_code({}, code_node)
|
||||
except:
|
||||
except Exception:
|
||||
response = exception.NodeNotFound(node=code_node)
|
||||
try:
|
||||
old_session=objects.SessionWP({}).get_session_by_node_uuid(node.uuid,valid=True)
|
||||
old_session.valid=False
|
||||
old_session = objects.SessionWP(
|
||||
{}).get_session_by_node_uuid(
|
||||
node.uuid, valid=True)
|
||||
old_session.valid = False
|
||||
old_session.save()
|
||||
except:
|
||||
except Exception:
|
||||
LOG.debug('valid session for %s Not found', node.uuid)
|
||||
|
||||
session=objects.SessionWP({})
|
||||
session.node_id=node.id
|
||||
session.node_uuid=node.uuid
|
||||
session.session_id=session_num
|
||||
|
||||
session = objects.SessionWP({})
|
||||
session.node_id = node.id
|
||||
session.node_uuid = node.uuid
|
||||
session.session_id = session_num
|
||||
session.create()
|
||||
session.save()
|
||||
|
||||
return unicode(response)
|
||||
|
||||
return unicode(response)
|
||||
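Once the conductor exposes this function over WAMP (it is registered below as u'stack4things.register'), a board-side caller could invoke it much like the removed RPCCaller test near the end of this change; an illustrative sketch, with the board code value assumed:

    class RegisterCaller(ApplicationSession):

        @inlineCallbacks
        def onJoin(self, details):
            # details.session is the WAMP session id passed as session_num
            res = yield self.call(u'stack4things.register',
                                  u'<board-code>', details.session)
            print("registration result: {0}".format(res))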
|
@ -1,32 +1,42 @@
|
||||
from twisted.internet.defer import inlineCallbacks
|
||||
from autobahn.twisted.wamp import ApplicationSession, ApplicationRunner
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from autobahn.twisted.wamp import ApplicationRunner
|
||||
from autobahn.twisted.wamp import ApplicationSession
|
||||
import multiprocessing
|
||||
from twisted.internet import reactor
|
||||
from oslo_log import log
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
from twisted.internet.defer import inlineCallbacks
|
||||
from twisted.internet import reactor
|
||||
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
wamp_opts = [
|
||||
cfg.StrOpt('wamp_ip',
|
||||
default='127.0.0.1',
|
||||
help=('URL of wamp broker')),
|
||||
cfg.IntOpt('wamp_port',
|
||||
default=8181,
|
||||
help='port wamp broker'),
|
||||
cfg.StrOpt('wamp_realm',
|
||||
default='s4t',
|
||||
help=('realm broker')),
|
||||
cfg.StrOpt('wamp_ip', default='127.0.0.1', help='URL of wamp broker'),
|
||||
cfg.IntOpt('wamp_port', default=8181, help='port wamp broker'),
|
||||
cfg.StrOpt('wamp_realm', default='s4t', help='realm broker')
|
||||
]
|
||||
|
||||
CONF = cfg.CONF
|
||||
CONF.register_opts(wamp_opts, 'wamp')
|
||||
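These options land in the 'wamp' group, so a deployment sets wamp_ip, wamp_port and wamp_realm in the [wamp] section of the service configuration and the code reads them back as group attributes; a small sketch (config file path assumed):

    from oslo_config import cfg

    CONF = cfg.CONF
    # after something like CONF(['--config-file', '/etc/iotronic/iotronic.conf'])
    # has parsed a file with a [wamp] section, the options resolve as:
    broker_url = "ws://{0}:{1}/ws".format(CONF.wamp.wamp_ip, CONF.wamp.wamp_port)
    realm = CONF.wamp.wamp_realm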
|
||||
|
||||
class RPCWampManager(ApplicationSession):
|
||||
|
||||
|
||||
def __init__(self, config=None):
|
||||
ApplicationSession.__init__(self, config)
|
||||
LOG.info("RPC wamp manager created")
|
||||
|
||||
|
||||
'''
|
||||
#unused methods
|
||||
def onConnect(self):
|
||||
@ -40,11 +50,11 @@ class RPCWampManager(ApplicationSession):
|
||||
print("session left")
|
||||
import os, signal
|
||||
os.kill(multi.pid, signal.SIGKILL)
|
||||
|
||||
|
||||
def onDisconnect(self):
|
||||
print("transport disconnected")
|
||||
'''
|
||||
|
||||
|
||||
@inlineCallbacks
|
||||
def onJoin(self, details):
|
||||
LOG.info('RPC Wamp Session ready')
|
||||
@ -54,35 +64,38 @@ class RPCWampManager(ApplicationSession):
|
||||
try:
|
||||
yield self.register(fun.test, u'stack4things.test')
|
||||
yield self.register(fun.registration, u'stack4things.register')
|
||||
|
||||
|
||||
LOG.info("Procedures registered")
|
||||
except Exception as e:
|
||||
print("could not register procedure: {0}".format(e))
|
||||
|
||||
class RPCWampServer:
|
||||
def __init__(self,ip,port,realm):
|
||||
self.ip=unicode(ip)
|
||||
self.port=unicode(port)
|
||||
self.realm=unicode(realm)
|
||||
self._url = "ws://"+self.ip+":"+self.port+"/ws"
|
||||
self.runner = ApplicationRunner(url=unicode(self._url), realm=self.realm,
|
||||
#debug=True, debug_wamp=True, debug_app=True
|
||||
)
|
||||
|
||||
|
||||
class RPCWampServer(object):
|
||||
|
||||
def __init__(self, ip, port, realm):
|
||||
self.ip = unicode(ip)
|
||||
self.port = unicode(port)
|
||||
self.realm = unicode(realm)
|
||||
self._url = "ws://" + self.ip + ":" + self.port + "/ws"
|
||||
self.runner = ApplicationRunner(
|
||||
url=unicode(self._url),
|
||||
realm=self.realm,
|
||||
# debug=True, debug_wamp=True,
|
||||
# debug_app=True
|
||||
)
|
||||
|
||||
def start(self):
|
||||
# Pass start_reactor=False to all runner.run() calls
|
||||
self.runner.run(RPCWampManager, start_reactor=False)
|
||||
|
||||
|
||||
class RPC_Wamp_Server:
|
||||
|
||||
def __init__(self):
|
||||
self.ip=unicode(CONF.wamp.wamp_ip)
|
||||
self.port=unicode(CONF.wamp.wamp_port)
|
||||
self.realm=unicode(CONF.wamp.wamp_realm)
|
||||
server = RPCWampServer(self.ip,self.port,self.realm)
|
||||
server.start()
|
||||
multi = multiprocessing.Process(target=reactor.run,args=())
|
||||
multi.start()
|
||||
|
||||
|
||||
|
||||
class RPC_Wamp_Server(object):
|
||||
|
||||
def __init__(self):
|
||||
self.ip = unicode(CONF.wamp.wamp_ip)
|
||||
self.port = unicode(CONF.wamp.wamp_port)
|
||||
self.realm = unicode(CONF.wamp.wamp_realm)
|
||||
server = RPCWampServer(self.ip, self.port, self.realm)
|
||||
server.start()
|
||||
multi = multiprocessing.Process(target=reactor.run, args=())
|
||||
multi.start()
|
||||
|
5
requirements.txt
Normal file
@ -0,0 +1,5 @@
|
||||
# The order of packages is significant, because pip processes them in the order
|
||||
# of appearance. Changing the order has an impact on the overall integration
|
||||
# process, which may cause wedges in the gate later.
|
||||
|
||||
pbr>=1.6 # Apache-2.0
|
46
setup.cfg
Normal file
@ -0,0 +1,46 @@
|
||||
[metadata]
|
||||
name = iotronic
|
||||
summary = IoTronic is an Internet of Things resource management service for OpenStack clouds.
|
||||
description-file =
|
||||
README.rst
|
||||
author = OpenStack
|
||||
author-email = openstack-dev@lists.openstack.org
|
||||
home-page = http://www.openstack.org/
|
||||
classifier =
|
||||
Environment :: OpenStack
|
||||
Intended Audience :: Information Technology
|
||||
Intended Audience :: System Administrators
|
||||
License :: OSI Approved :: Apache Software License
|
||||
Operating System :: POSIX :: Linux
|
||||
Programming Language :: Python
|
||||
Programming Language :: Python :: 2
|
||||
Programming Language :: Python :: 2.7
|
||||
Programming Language :: Python :: 3
|
||||
Programming Language :: Python :: 3.3
|
||||
Programming Language :: Python :: 3.4
|
||||
|
||||
[files]
|
||||
packages =
|
||||
iotronic
|
||||
|
||||
[build_sphinx]
|
||||
source-dir = doc/source
|
||||
build-dir = doc/build
|
||||
all_files = 1
|
||||
|
||||
[upload_sphinx]
|
||||
upload-dir = doc/build/html
|
||||
|
||||
[compile_catalog]
|
||||
directory = iotronic/locale
|
||||
domain = iotronic
|
||||
|
||||
[update_catalog]
|
||||
domain = iotronic
|
||||
output_dir = iotronic/locale
|
||||
input_file = iotronic/locale/iotronic.pot
|
||||
|
||||
[extract_messages]
|
||||
keywords = _ gettext ngettext l_ lazy_gettext
|
||||
mapping_file = babel.cfg
|
||||
output_file = iotronic/locale/iotronic.pot
|
104
setup.py
@ -1,81 +1,29 @@
|
||||
import os
|
||||
from setuptools import setup, find_packages
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
def read(fname):
|
||||
return open(os.path.join(os.path.dirname(__file__), fname)).read()
|
||||
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
|
||||
import setuptools
|
||||
|
||||
setup(
|
||||
name = "iotronic",
|
||||
#packages = ["cwProbe", "plugins"],
|
||||
packages = find_packages(),
|
||||
version = "0.1",
|
||||
description = "iot",
|
||||
author = "",
|
||||
author_email = "",
|
||||
url = "",
|
||||
download_url = "",
|
||||
keywords = ["iotronic", "iot", "s4t"],
|
||||
classifiers = [
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 2.7",
|
||||
"Development Status :: 4 - Beta",
|
||||
"Environment :: Other Environment",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: GNU General Public License (GPL)",
|
||||
"Operating System :: OS Independent",
|
||||
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||
],
|
||||
license='GPL',
|
||||
platforms=['Any'],
|
||||
#provides=['plugins',],
|
||||
|
||||
# In python < 2.7.4, a lazy loading of package `pbr` will break
|
||||
# setuptools if some other modules registered functions in `atexit`.
|
||||
# solution from: http://bugs.python.org/issue15881#msg170215
|
||||
try:
|
||||
import multiprocessing # noqa
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
dependency_links = [
|
||||
|
||||
],
|
||||
|
||||
|
||||
|
||||
entry_points={
|
||||
#'cwprobe.plugins.monitors': [
|
||||
#'mycheck = plugins.cwpl_mycheckpoint:Cwpl_MyCheckPoint',
|
||||
#'cpu = plugins.cwpl_cpu:Cwpl_Cpu',
|
||||
#'awstats = plugins.cwpl_awstats:Cwpl_Awstat',
|
||||
#'test = plugins.cwpl_test:Cwpl_Test',
|
||||
#],
|
||||
},
|
||||
|
||||
install_requires=[
|
||||
#'setuptools',
|
||||
#'greenlet',
|
||||
#'httplib2',
|
||||
#'stevedore',
|
||||
#'psutil',
|
||||
#'qpid-python==0.20',
|
||||
#'pyyamllib',
|
||||
#'pyloglib',
|
||||
#'cwconfparser',
|
||||
#'MySQL-python',
|
||||
],
|
||||
|
||||
|
||||
include_package_data = True,
|
||||
|
||||
data_files=[
|
||||
('/usr/bin', ['bin/iotronic-conductor']),
|
||||
('/usr/bin', ['bin/iotronic']),
|
||||
],
|
||||
|
||||
|
||||
#package_data = {
|
||||
# '': ['scripts/etc/init.d/cwProbe', 'scripts/usr/bin/cwProbe'],
|
||||
#},
|
||||
|
||||
|
||||
#options = {'bdist_rpm':{'post_install' : 'scripts/post_install'},
|
||||
|
||||
zip_safe=False,
|
||||
#long_description=read('README.txt')
|
||||
|
||||
|
||||
)
|
||||
setuptools.setup(
|
||||
setup_requires=['pbr'],
|
||||
pbr=True)
|
||||
|
@ -1,163 +0,0 @@
|
||||
#########################################################################################
|
||||
##
|
||||
## The MIT License (MIT)
|
||||
##
|
||||
## Copyright (c) 2014 Andrea Rocco Lotronto
|
||||
##
|
||||
## Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
## of this software and associated documentation files (the "Software"), to deal
|
||||
## in the Software without restriction, including without limitation the rights
|
||||
## to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
## copies of the Software, and to permit persons to whom the Software is
|
||||
## furnished to do so, subject to the following conditions:
|
||||
##
|
||||
## The above copyright notice and this permission notice shall be included in all
|
||||
## copies or substantial portions of the Software.
|
||||
##
|
||||
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
## SOFTWARE.
|
||||
########################################################################################
|
||||
|
||||
from autobahn.twisted.wamp import ApplicationSession
|
||||
from autobahn.twisted.wamp import ApplicationSessionFactory
|
||||
|
||||
from autobahn.twisted.websocket import WampWebSocketClientFactory
|
||||
|
||||
from autobahn.wamp.types import ComponentConfig
|
||||
|
||||
from twisted.internet.defer import inlineCallbacks
|
||||
from twisted.internet import reactor
|
||||
from twisted.internet.endpoints import clientFromString
|
||||
from twisted.python import log
|
||||
|
||||
import threading
|
||||
import time
|
||||
import sys
|
||||
log.startLogging(sys.stdout)
|
||||
|
||||
|
||||
##Global Variable for saving client writer session
|
||||
sessio_writer=None
|
||||
|
||||
|
||||
## WAMP Application Class for Writer Client ##
|
||||
class AutobahnClientWriter(ApplicationSession):
|
||||
|
||||
@inlineCallbacks
|
||||
def onJoin(self, details):
|
||||
|
||||
global sessio_writer
|
||||
sessio_writer = self
|
||||
yield log.msg('Client Writer Connected')
|
||||
######################################################
|
||||
|
||||
## WAMP Application Class for Reader Client ##
|
||||
class AutobahnClientReader(ApplicationSession):
|
||||
|
||||
@inlineCallbacks
|
||||
def onJoin(self, details):
|
||||
|
||||
log.msg('Client Reader Connected')
|
||||
|
||||
self.topic_reader = self.config.extra['topicReader']
|
||||
|
||||
def onMessage(*args):
|
||||
#DEBUG Message
|
||||
log.msg('I receives',args)
|
||||
##New Class Parser for MSG
|
||||
|
||||
try:
|
||||
yield self.subscribe(onMessage, self.topic_reader)
|
||||
print ("Subscribed to topic: "+self.topic_reader)
|
||||
|
||||
except Exception as e:
|
||||
print("could not subscribe to topic:" +self.topic_reader)
|
||||
######################################################
|
||||
|
||||
## Main class for initializing and starting the WAMP clients
|
||||
class WampClient():
|
||||
|
||||
def __init__(self, topicRead='board.connection'):  # TODO: fix this
|
||||
|
||||
self._topicRead = None
|
||||
self._debug = False
|
||||
self._debug_wamp = False
|
||||
self._debug_app = False
|
||||
|
||||
self._factoryWriter = None
|
||||
self._factoryReader = None
|
||||
|
||||
self._realm = None
|
||||
self._url = None
|
||||
|
||||
self._extra = {'topicReader': topicRead}
|
||||
|
||||
def connect(self, ip, port, realm):
|
||||
|
||||
self._realm = realm
|
||||
self._url = 'ws://'+ip+':'+port+'/ws'
|
||||
self._reactor_thread = None
|
||||
|
||||
self._session_factoryWriter = None
|
||||
self._session_factoryReader = None
|
||||
|
||||
cfgReader = ComponentConfig(self._realm, self._extra)
|
||||
cfgWriter = ComponentConfig(self._realm, self._extra)
|
||||
|
||||
self._session_factoryReader = ApplicationSessionFactory(cfgReader)
|
||||
self._session_factoryReader.session = AutobahnClientReader
|
||||
|
||||
self._session_factoryWriter = ApplicationSessionFactory(cfgWriter)
|
||||
self._session_factoryWriter.session = AutobahnClientWriter
|
||||
|
||||
|
||||
self._factoryReader = WampWebSocketClientFactory(self._session_factoryReader, url = self._url,
|
||||
debug = self._debug, debug_wamp = self._debug_wamp)
|
||||
|
||||
self._factoryWriter = WampWebSocketClientFactory(self._session_factoryWriter, url = self._url,
|
||||
debug = self._debug, debug_wamp = self._debug_wamp)
|
||||
|
||||
self._reactor_thread = threading.Thread(target=reactor.run, args=(False,))
|
||||
self._reactor_thread.daemon = True
|
||||
|
||||
endpoint_descriptor = 'tcp:'+ip+':'+port
|
||||
|
||||
self._clientReader = clientFromString(reactor, endpoint_descriptor)
|
||||
self._clientReader.connect(self._factoryReader)
|
||||
|
||||
self._clientWriter = clientFromString(reactor, endpoint_descriptor)
|
||||
self._clientWriter.connect(self._factoryWriter)
|
||||
|
||||
self._reactor_thread.start()
|
||||
|
||||
return self
|
||||
##################################################################################
|
||||
|
||||
|
||||
## Utility Class to wite on a specific topic ##
|
||||
def writeToTopic(topic, message):
|
||||
global sessio_writer
|
||||
sessio_writer.publish(topic,message)
|
||||
#######################################################
|
||||
|
||||
##### Config parameters ####
|
||||
ipWamp = '172.17.3.139'
|
||||
portWamp ='8181'
|
||||
realmWAMP = 's4t'
|
||||
## Write topic; Msg
|
||||
##########################
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
client = WampClient()
|
||||
test = client.connect(ipWamp, portWamp, realmWAMP)
|
||||
|
||||
while True:
|
||||
time.sleep(2)
|
||||
writeToTopic('board.connection', 'MEEEEEEEEEEEEEE')
|
@ -1,147 +0,0 @@
|
||||
from autobahn.twisted.websocket import WampWebSocketClientFactory
|
||||
from autobahn.twisted.websocket import WampWebSocketClientProtocol
|
||||
|
||||
from autobahn.twisted.websocket import WebSocketClientProtocol
|
||||
from autobahn.twisted.websocket import WebSocketClientFactory
|
||||
from autobahn.twisted.websocket import connectWS
|
||||
|
||||
from autobahn.twisted.wamp import ApplicationSessionFactory
|
||||
|
||||
from twisted.internet import reactor
|
||||
|
||||
#from twisted.python import log
|
||||
#import sys
|
||||
#log.startLogging(sys.stdout)
|
||||
import threading
|
||||
import Queue
|
||||
|
||||
# ----- twisted ----------
|
||||
class MyAppComponent(ApplicationSession):
|
||||
|
||||
def onJoin(self, details):
|
||||
if not self.factory._myAppSession:
|
||||
self.factory._myAppSession = self
|
||||
|
||||
def onLeave(self, details):
|
||||
if self.factory._myAppSession == self:
|
||||
self.factory._myAppSession = None
|
||||
|
||||
#-------------------------------------------------------
|
||||
class _WampClientProtocol(WampWebSocketClientProtocol):
|
||||
def __init__(self, factory):
|
||||
self.factory = factory
|
||||
|
||||
def onOpen(self):
|
||||
#log.msg("Client connected")
|
||||
self.factory.protocol_instance = self
|
||||
self.factory.base_client._connected_event.set()
|
||||
#--------------------------------------------------------
|
||||
|
||||
class _WampClientFactory(WampWebSocketClientFactory):
|
||||
def __init__(self, factory, *args, **kwargs):
|
||||
WampWebSocketClientFactory.__init__(self, factory, *args, **kwargs)
|
||||
self.protocol_instance = None
|
||||
self.base_client = None
|
||||
|
||||
def buildProtocol(self, addr):
|
||||
return _WampClientProtocol(self)
|
||||
#------------------------------------------------------------
|
||||
|
||||
'''
|
||||
class _WebSocketClientProtocol(WebSocketClientProtocol):
|
||||
def __init__(self, factory):
|
||||
self.factory = factory
|
||||
|
||||
def onOpen(self):
|
||||
#log.debug("Client connected")
|
||||
self.factory.protocol_instance = self
|
||||
self.factory.base_client._connected_event.set()
|
||||
|
||||
class _WebSocketClientFactory(WebSocketClientFactory):
|
||||
def __init__(self, *args, **kwargs):
|
||||
WebSocketClientFactory.__init__(self, *args, **kwargs)
|
||||
self.protocol_instance = None
|
||||
self.base_client = None
|
||||
|
||||
def buildProtocol(self, addr):
|
||||
return _WebSocketClientProtocol(self)
|
||||
'''
|
||||
# ------ end twisted -------
|
||||
|
||||
class BaseWBClient(object):
|
||||
|
||||
def __init__(self, websocket_settings):
|
||||
#self.settings = websocket_settings
|
||||
# instance to be set by the own factory
|
||||
self.factory = None
|
||||
# this event will be triggered on onOpen()
|
||||
self._connected_event = threading.Event()
|
||||
# queue to hold not yet dispatched messages
|
||||
self._send_queue = Queue.Queue()
|
||||
self._reactor_thread = None
|
||||
|
||||
self.session_factory = ApplicationSessionFactory()
|
||||
|
||||
def connect(self):
|
||||
|
||||
#log.msg("Connecting to 172.17.3.139:8181")
|
||||
self.factory = _WampClientFactory(self.session_factory,
|
||||
"ws://172.17.3.139:8181/ws",
|
||||
debug_wamp=True)
|
||||
self.factory.base_client = self
|
||||
|
||||
c = connectWS(self.factory)
|
||||
|
||||
self._reactor_thread = threading.Thread(target=reactor.run,
|
||||
args=(False,))
|
||||
self._reactor_thread.daemon = True
|
||||
self._reactor_thread.start()
|
||||
|
||||
def send_message(self, body):
|
||||
if not self._check_connection():
|
||||
return
|
||||
#log.msg("Queing send")
|
||||
self._send_queue.put(body)
|
||||
reactor.callFromThread(self._dispatch)
|
||||
|
||||
def _check_connection(self):
|
||||
if not self._connected_event.wait(timeout=10):
|
||||
#log.err("Unable to connect to server")
|
||||
self.close()
|
||||
return False
|
||||
return True
|
||||
|
||||
def _dispatch(self):
|
||||
#log.msg("Dispatching")
|
||||
while True:
|
||||
try:
|
||||
body = self._send_queue.get(block=False)
|
||||
except Queue.Empty:
|
||||
break
|
||||
self.factory.protocol_instance.sendMessage(body)
|
||||
|
||||
def close(self):
|
||||
reactor.callFromThread(reactor.stop)
|
||||
|
||||
import time
|
||||
def Ppippo(coda):
|
||||
while True:
|
||||
coda.send_message('YOOOOOOOO')
|
||||
time.sleep(5)
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
ws_setting = {'host':'172.17.3.139', 'port':8080}
|
||||
|
||||
client = BaseWBClient(ws_setting)
|
||||
|
||||
t1 = threading.Thread(client.connect())
|
||||
#t11 = threading.Thread(Ppippo(client))
|
||||
#t11.start()
|
||||
t1.start()
|
||||
|
||||
#client.connect()
|
||||
#client.send_message('pippo')
|
||||
|
||||
|
||||
|
@ -1,107 +0,0 @@
|
||||
from autobahn.twisted.websocket import WebSocketClientProtocol
|
||||
from autobahn.twisted.websocket import WebSocketClientFactory
|
||||
from autobahn.twisted.websocket import connectWS
|
||||
|
||||
from twisted.internet import reactor
|
||||
|
||||
from twisted.python import log
|
||||
import sys
|
||||
log.startLogging(sys.stdout)
|
||||
import threading
|
||||
import Queue
|
||||
|
||||
# ----- twisted ----------
|
||||
class _WebSocketClientProtocol(WebSocketClientProtocol):
|
||||
def __init__(self, factory):
|
||||
self.factory = factory
|
||||
|
||||
def onOpen(self):
|
||||
#log.debug("Client connected")
|
||||
self.factory.protocol_instance = self
|
||||
self.factory.base_client._connected_event.set()
|
||||
|
||||
class _WebSocketClientFactory(WebSocketClientFactory):
|
||||
def __init__(self, *args, **kwargs):
|
||||
WebSocketClientFactory.__init__(self, *args, **kwargs)
|
||||
self.protocol_instance = None
|
||||
self.base_client = None
|
||||
|
||||
def buildProtocol(self, addr):
|
||||
return _WebSocketClientProtocol(self)
|
||||
# ------ end twisted -------
|
||||
|
||||
class BaseWBClient(object):
|
||||
|
||||
def __init__(self, websocket_settings):
|
||||
#self.settings = websocket_settings
|
||||
# instance to be set by the own factory
|
||||
self.factory = None
|
||||
# this event will be triggered on onOpen()
|
||||
self._connected_event = threading.Event()
|
||||
# queue to hold not yet dispatched messages
|
||||
self._send_queue = Queue.Queue()
|
||||
self._reactor_thread = None
|
||||
|
||||
def connect(self):
|
||||
|
||||
log.msg("Connecting to 172.17.3.139:8282")
|
||||
self.factory = _WebSocketClientFactory(
|
||||
"ws://172.17.3.139:8282",
|
||||
debug=True)
|
||||
self.factory.base_client = self
|
||||
|
||||
c = connectWS(self.factory)
|
||||
|
||||
self._reactor_thread = threading.Thread(target=reactor.run,
|
||||
args=(False,))
|
||||
self._reactor_thread.daemon = True
|
||||
self._reactor_thread.start()
|
||||
|
||||
def send_message(self, body):
|
||||
if not self._check_connection():
|
||||
return
|
||||
log.msg("Queing send")
|
||||
self._send_queue.put(body)
|
||||
reactor.callFromThread(self._dispatch)
|
||||
|
||||
def _check_connection(self):
|
||||
if not self._connected_event.wait(timeout=10):
|
||||
log.err("Unable to connect to server")
|
||||
self.close()
|
||||
return False
|
||||
return True
|
||||
|
||||
def _dispatch(self):
|
||||
log.msg("Dispatching")
|
||||
while True:
|
||||
try:
|
||||
body = self._send_queue.get(block=False)
|
||||
except Queue.Empty:
|
||||
break
|
||||
self.factory.protocol_instance.sendMessage(body)
|
||||
|
||||
def close(self):
|
||||
reactor.callFromThread(reactor.stop)
|
||||
|
||||
import time
|
||||
def Ppippo(coda):
|
||||
while True:
|
||||
coda.send_message('YOOOOOOOO')
|
||||
time.sleep(5)
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
ws_setting = {'host':'172.17.3.139', 'port':8080}
|
||||
|
||||
client = BaseWBClient(ws_setting)
|
||||
|
||||
t1 = threading.Thread(client.connect())
|
||||
t11 = threading.Thread(Ppippo(client))
|
||||
t11.start()
|
||||
t1.start()
|
||||
|
||||
#client.connect()
|
||||
#client.send_message('pippo')
|
||||
|
||||
|
||||
|
@ -1,40 +0,0 @@
|
||||
from autobahn.twisted.websocket import WebSocketServerProtocol, \
|
||||
WebSocketServerFactory
|
||||
|
||||
|
||||
class MyServerProtocol(WebSocketServerProtocol):
|
||||
|
||||
def onConnect(self, request):
|
||||
print("Client connecting: {0}".format(request.peer))
|
||||
|
||||
def onOpen(self):
|
||||
print("WebSocket connection open.")
|
||||
|
||||
def onMessage(self, payload, isBinary):
|
||||
if isBinary:
|
||||
print("Binary message received: {0} bytes".format(len(payload)))
|
||||
else:
|
||||
print("Text message received: {0}".format(payload.decode('utf8')))
|
||||
|
||||
## echo back message verbatim
|
||||
self.sendMessage(payload, isBinary)
|
||||
|
||||
def onClose(self, wasClean, code, reason):
|
||||
print("WebSocket connection closed: {0}".format(reason))
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
import sys
|
||||
|
||||
from twisted.python import log
|
||||
from twisted.internet import reactor
|
||||
|
||||
log.startLogging(sys.stdout)
|
||||
|
||||
factory = WebSocketServerFactory("ws://172.17.3.139:8282", debug = False)
|
||||
factory.protocol = MyServerProtocol
|
||||
|
||||
reactor.listenTCP(8282, factory)
|
||||
reactor.run()
|
@ -1,46 +0,0 @@
|
||||
from autobahn.twisted.websocket import WebSocketClientProtocol, \
|
||||
WebSocketClientFactory
|
||||
|
||||
|
||||
|
||||
class MyClientProtocol(WebSocketClientProtocol):
|
||||
|
||||
def onConnect(self, response):
|
||||
print("Server connected: {0}".format(response.peer))
|
||||
|
||||
def onOpen(self):
|
||||
print("WebSocket connection open.")
|
||||
|
||||
def hello():
|
||||
self.sendMessage(u"Hello, world!".encode('utf8'))
|
||||
self.sendMessage(b"\x00\x01\x03\x04", isBinary = True)
|
||||
self.factory.reactor.callLater(1, hello)
|
||||
|
||||
## start sending messages every second ..
|
||||
hello()
|
||||
|
||||
def onMessage(self, payload, isBinary):
|
||||
if isBinary:
|
||||
print("Binary message received: {0} bytes".format(len(payload)))
|
||||
else:
|
||||
print("Text message received: {0}".format(payload.decode('utf8')))
|
||||
|
||||
def onClose(self, wasClean, code, reason):
|
||||
print("WebSocket connection closed: {0}".format(reason))
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
import sys
|
||||
|
||||
from twisted.python import log
|
||||
from twisted.internet import reactor
|
||||
|
||||
log.startLogging(sys.stdout)
|
||||
|
||||
factory = WebSocketClientFactory("ws://localhost:9000", debug = False)
|
||||
factory.protocol = MyClientProtocol
|
||||
|
||||
reactor.connectTCP("localhost", 9000, factory)
|
||||
reactor.run()
|
@ -1,97 +0,0 @@
|
||||
|
||||
wampAddress = 'ws://172.17.3.139:8181/ws'
|
||||
wampRealm = 's4t'
|
||||
|
||||
#from threading import Thread
|
||||
|
||||
from autobahn.twisted.wamp import ApplicationRunner
|
||||
from autobahn.twisted.wamp import ApplicationSession
|
||||
from twisted.internet.defer import inlineCallbacks
|
||||
|
||||
# imports for testing
|
||||
from twisted.internet.defer import DeferredQueue
|
||||
from twisted.internet import threads
|
||||
|
||||
# Autobahn class for managing communication with remote devices
|
||||
class AutobahnMRS(ApplicationSession):
|
||||
@inlineCallbacks
|
||||
def onJoin(self, details):
|
||||
print("Sessio attached [Connect to WAMP Router] Sub")
|
||||
|
||||
def onMessage(*args):
|
||||
print args
|
||||
|
||||
try:
|
||||
yield self.subscribe(onMessage, 'test')
|
||||
print ("Subscribed to topic: test")
|
||||
|
||||
except Exception as e:
|
||||
print("Exception:" +e)
|
||||
|
||||
|
||||
# Autobahn class for managing internal communication
|
||||
class AutobahnIM(ApplicationSession):
|
||||
|
||||
@inlineCallbacks
|
||||
def onJoin(self, details):
|
||||
print("Sessio attached [Connect to WAMP Router] Pub")
|
||||
|
||||
try:
|
||||
yield self.publish('test','YOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO')
|
||||
print ("Publish to topic: test")
|
||||
|
||||
except Exception as e:
|
||||
print("Exception:" +e)
|
||||
|
||||
|
||||
|
||||
# Class for managing communication with remote devices
|
||||
class ManageRemoteSystem:
|
||||
def __init__(self):
|
||||
self.runner = ApplicationRunner(url= wampAddress, realm = wampRealm)
|
||||
|
||||
def start(self):
|
||||
self.runner.run(AutobahnMRS, start_reactor=False);
|
||||
|
||||
|
||||
# Class for managing internal communication within ManageRemoteSystem
|
||||
class InternalMessages:
|
||||
def __init__(self):
|
||||
self.runner = ApplicationRunner(url= wampAddress, realm = wampRealm)
|
||||
|
||||
def start(self):
|
||||
self.runner.run(AutobahnIM, start_reactor=False);
|
||||
|
||||
# Main class for the iotronic service
|
||||
#class S4tServer:
|
||||
|
||||
def something():
|
||||
count = 0
|
||||
while True:
|
||||
print('something:', count)
|
||||
yield sleep(1)
|
||||
count+=1
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
#import multiprocessing
|
||||
|
||||
server = ManageRemoteSystem()
|
||||
#sendMessage = InternalMessages()
|
||||
server.start()
|
||||
#sendMessage.start()
|
||||
|
||||
from twisted.internet import reactor
|
||||
reactor.run()
|
||||
#thread1 = Thread(target = reactor.run())
|
||||
#thread2 = Thread(target = something())
|
||||
|
||||
#thread2.start()
|
||||
#thread1.start()
|
||||
|
||||
#thread1.daemon = True
|
||||
#thread2.daemon = True
|
||||
|
||||
#thread2.join()
|
||||
#thread1.join()
|
||||
|
14
test-requirements.txt
Normal file
@ -0,0 +1,14 @@
|
||||
# The order of packages is significant, because pip processes them in the order
|
||||
# of appearance. Changing the order has an impact on the overall integration
|
||||
# process, which may cause wedges in the gate later.
|
||||
|
||||
hacking>=0.10.2,<0.11 # Apache-2.0
|
||||
|
||||
coverage>=3.6 # Apache-2.0
|
||||
python-subunit>=0.0.18 # Apache-2.0/BSD
|
||||
sphinx>=1.1.2,!=1.2.0,!=1.3b1,<1.3 # BSD
|
||||
oslosphinx>=2.5.0,!=3.4.0 # Apache-2.0
|
||||
oslotest>=1.10.0 # Apache-2.0
|
||||
testrepository>=0.0.18 # Apache-2.0/BSD
|
||||
testscenarios>=0.4 # Apache-2.0/BSD
|
||||
testtools>=1.4.0 # MIT
|
60
tox.ini
Normal file
@ -0,0 +1,60 @@
|
||||
[tox]
|
||||
minversion = 2.0
|
||||
envlist = py34-constraints,py27-constraints,pypy-constraints,pep8-constraints
|
||||
skipsdist = True
|
||||
|
||||
[testenv]
|
||||
usedevelop = True
|
||||
install_command =
|
||||
constraints: {[testenv:common-constraints]install_command}
|
||||
pip install -U {opts} {packages}
|
||||
setenv =
|
||||
VIRTUAL_ENV={envdir}
|
||||
deps = -r{toxinidir}/test-requirements.txt
|
||||
# commands = python setup.py test --slowest --testr-args='{posargs}'
|
||||
|
||||
[testenv:common-constraints]
|
||||
install_command = pip install -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt} {opts} {packages}
|
||||
|
||||
[testenv:pep8]
|
||||
commands = flake8 {posargs}
|
||||
|
||||
[testenv:pep8-constraints]
|
||||
install_command = {[testenv:common-constraints]install_command}
|
||||
commands = flake8 {posargs}
|
||||
|
||||
[testenv:venv]
|
||||
commands = {posargs}
|
||||
|
||||
[testenv:venv-constraints]
|
||||
install_command = {[testenv:common-constraints]install_command}
|
||||
commands = {posargs}
|
||||
|
||||
[testenv:cover]
|
||||
commands = python setup.py test --coverage --testr-args='{posargs}'
|
||||
|
||||
[testenv:cover-constraints]
|
||||
install_command = {[testenv:common-constraints]install_command}
|
||||
commands = python setup.py test --coverage --testr-args='{posargs}'
|
||||
|
||||
[testenv:docs]
|
||||
commands = python setup.py build_sphinx
|
||||
|
||||
[testenv:docs-constraints]
|
||||
install_command = {[testenv:common-constraints]install_command}
|
||||
commands = python setup.py build_sphinx
|
||||
|
||||
[testenv:debug]
|
||||
commands = oslo_debug_helper {posargs}
|
||||
|
||||
[testenv:debug-constraints]
|
||||
install_command = {[testenv:common-constraints]install_command}
|
||||
commands = oslo_debug_helper {posargs}
|
||||
|
||||
[flake8]
|
||||
# E123, E125 skipped as they are invalid PEP-8.
|
||||
|
||||
show-source = True
|
||||
ignore = E123,E125
|
||||
builtins = _
|
||||
exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build
|
@ -1,10 +0,0 @@
|
||||
from iotronic.wamp.clientwamp import ClientWamp
|
||||
from sys import stdin
|
||||
import inspect
|
||||
|
||||
c=ClientWamp('localhost','8181','s4t')
|
||||
c.send('board.connection','Hello from the chat wamp!')
|
||||
print 'USING',inspect.getfile(c.__class__)
|
||||
while True:
|
||||
userinput = stdin.readline()
|
||||
c.send('board.connection',str(userinput))
|
@ -1,20 +0,0 @@
|
||||
from autobahn.twisted.wamp import ApplicationSession
|
||||
from twisted.internet.defer import inlineCallbacks
|
||||
from autobahn.twisted.wamp import ApplicationRunner
|
||||
|
||||
|
||||
class RPCCaller(ApplicationSession):
|
||||
@inlineCallbacks
|
||||
def onJoin(self, details):
|
||||
print("session ready")
|
||||
try:
|
||||
#stack4things.iotronic.conductor.function
|
||||
res = yield self.call(u'stack4things.conductor.rpc.test',)
|
||||
print("call result: {}".format(res))
|
||||
except Exception as e:
|
||||
print("call error: {0}".format(e))
|
||||
|
||||
runner = ApplicationRunner(url=u"ws://localhost:8181/ws", realm=u"s4t")
|
||||
runner.run(RPCCaller)
|
||||
|
||||
|
@ -1,6 +0,0 @@
|
||||
from iotronic.wamp.rpcwampserver import RPC_Wamp_Server
|
||||
#NB USE THE IP ADDRESS INSTEAD OF THE HOSTNAME
|
||||
c=RPC_Wamp_Server('127.0.0.1','8181','s4t')
|
||||
|
||||
|
||||
|