Merge branch 'master' into testable-resources
@@ -34,7 +34,9 @@ sudo docker exec -it foo
* To get data for the resource bar (raw and pretty-JSON):
```
solar resource show --tag 'resources/bar'
solar resource show --use-json --tag 'resources/bar' | jq .
solar resource show --json --tag 'resources/bar' | jq .
solar resource show --name 'resource_name'
solar resource show --name 'resource_name' --json | jq .
```

* To clear all resources/connections:
@@ -50,12 +52,12 @@ cd /vagrant
solar resource create node1 resources/ro_node/ '{"ip":"10.0.0.3", "ssh_key" : "/vagrant/.vagrant/machines/solar-dev1/virtualbox/private_key", "ssh_user":"vagrant"}'
solar resource create mariadb_service resources/mariadb_service '{"image": "mariadb", "root_password": "mariadb", "port": 3306}'
solar resource create keystone_db resources/mariadb_keystone_db/ '{"db_name": "keystone_db", "login_user": "root"}'
solar resource create keystone_db_user resources/mariadb_user/ '{"user_name": "keystone", "user_password": "keystone", "login_user": "root"}'
solar resource create keystone_db_user resources/mariadb_user/ user_name=keystone user_password=keystone # another valid format

solar connect node1 mariadb_service
solar connect node1 keystone_db
solar connect mariadb_service keystone_db --mapping '{"root_password": "login_password", "port": "login_port"}'
solar connect mariadb_service keystone_db_user --mapping '{"root_password": "login_password", "port": "login_port"}'
solar connect mariadb_service keystone_db '{"root_password": "login_password", "port": "login_port"}'
# solar connect mariadb_service keystone_db_user 'root_password->login_password port->login_port' # another valid format
solar connect keystone_db keystone_db_user

solar changes stage
@@ -65,6 +67,7 @@ solar changes commit
You can fiddle with the above configuration like this:
```
solar resource update keystone_db_user '{"user_password": "new_keystone_password"}'
solar resource update keystone_db_user user_password=new_keystone_password # another valid format

solar changes stage
solar changes commit
@@ -76,6 +79,11 @@ solar connections show
solar connections graph
```

* You can make sure that all input values are correct and mapped without duplicating your values with this command:
```
solar resource validate
```
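* The cli.py changes below also add a --check-missing-connections flag to solar resource validate, which reports inputs that share a value but are not connected; a hedged example of invoking it:
```
solar resource validate --check-missing-connections
```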
# Low level API

## HAProxy deployment (not maintained)

@@ -42,7 +42,8 @@ def deploy():
    keystone_db_user = vr.create('keystone_db_user', 'resources/mariadb_user/', {'user_name': 'keystone', 'user_password': 'keystone', 'login_user': 'root'})[0]

    keystone_config1 = vr.create('keystone_config1', GitProvider(GIT_KEYSTONE_RESOURCE_URL, path='keystone_config'), {'config_dir': '/etc/solar/keystone', 'admin_token': 'admin'})[0]
    keystone_service1 = vr.create('keystone_service1', RemoteZipProvider(ZIP_KEYSTONE_RESOURCE_URL, 'keystone_service'), {'port': 5001, 'admin_port': 35357})[0]
    #keystone_service1 = vr.create('keystone_service1', RemoteZipProvider(ZIP_KEYSTONE_RESOURCE_URL, 'keystone_service'), {'port': 5001, 'admin_port': 35357})[0]
    keystone_service1 = vr.create('keystone_service1', GitProvider(GIT_KEYSTONE_RESOURCE_URL, 'keystone_service'), {'port': 5001, 'admin_port': 35357})[0]

    keystone_config2 = vr.create('keystone_config2', GitProvider(GIT_KEYSTONE_RESOURCE_URL, 'keystone_config'), {'config_dir': '/etc/solar/keystone', 'admin_token': 'admin'})[0]
    keystone_service2 = vr.create('keystone_service2', GitProvider(GIT_KEYSTONE_RESOURCE_URL, 'keystone_service'), {'port': 5002, 'admin_port': 35358})[0]

@@ -20,6 +20,6 @@ pip install -r solar/requirements.txt --download-cache=/tmp/$JOB_NAME

pushd solar/solar

PYTHONPATH=$WORKSPACE/solar CONFIG_FILE=$CONFIG_FILE py.test test/
PYTHONPATH=$WORKSPACE/solar CONFIG_FILE=$CONFIG_FILE py.test -s test/

popd

@@ -13,3 +13,4 @@ redis==2.10.3
pytest
fakeredis
Fabric==1.10.2
tabulate==0.7.5

@@ -24,6 +24,7 @@ import networkx as nx
import os
import pprint
import sys
import tabulate
import yaml

from solar import utils
@@ -46,6 +47,31 @@ from solar.extensions.modules.discovery import Discovery
db = get_db()


# HELPERS
def format_resource_input(resource_name, resource_input_name):
    return '{}::{}'.format(
        #click.style(resource_name, fg='white', bold=True),
        resource_name,
        click.style(resource_input_name, fg='yellow')
    )

def show_emitter_connections(emitter_name, destinations):
    inputs = sorted(destinations)

    for emitter_input in inputs:
        click.echo(
            '{} -> {}'.format(
                format_resource_input(emitter_name, emitter_input),
                '[{}]'.format(
                    ', '.join(
                        format_resource_input(*r)
                        for r in destinations[emitter_input]
                    )
                )
            )
        )


@click.group()
def main():
    pass
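A rough, self-contained illustration of the output format produced by the show_emitter_connections helper above; resource names are hypothetical and click styling is omitted:
```
# Hypothetical destinations mapping: emitter input -> list of (receiver, input) pairs.
destinations = {'ip': [('mariadb_service', 'ip'), ('keystone_db', 'ip')]}

for emitter_input in sorted(destinations):
    receivers = ', '.join('{}::{}'.format(r, i) for r, i in destinations[emitter_input])
    print('{}::{} -> [{}]'.format('node1', emitter_input, receivers))
# node1::ip -> [mariadb_service::ip, keystone_db::ip]
```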
@@ -79,13 +105,6 @@ def assign(resources, nodes):
    assign_resources_to_nodes(resources, nodes)


# @main.command()
# @click.option('-p', '--profile')
# def connect(profile):
# profile_ = db.get_record('profiles', profile)
# connect_resources(profile_)


@main.command()
def discover():
    Discovery({'id': 'discovery'}).discover()
@@ -177,16 +196,23 @@ def init_cli_connect():
    @main.command()
    @click.argument('emitter')
    @click.argument('receiver')
    @click.option('--mapping', default=None)
    @click.argument('mapping', default='')
    def connect(mapping, receiver, emitter):
        mapping_parsed = {}

        click.echo('Connect {} to {}'.format(emitter, receiver))
        emitter = sresource.load(emitter)
        receiver = sresource.load(receiver)
        click.echo(emitter)
        click.echo(receiver)
        if mapping is not None:
            mapping = json.loads(mapping)
        signals.connect(emitter, receiver, mapping=mapping)
        try:
            mapping_parsed.update(json.loads(mapping))
        except ValueError:
            for m in mapping.split():
                k, v = m.split('->')
                mapping_parsed.update({k: v})
        signals.connect(emitter, receiver, mapping=mapping_parsed)

        clients = signals.Connections.read_clients()
        show_emitter_connections(emitter.name, clients[emitter.name])

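The rewritten connect command parses its positional mapping argument either as a JSON object or as space-separated src->dst pairs. A minimal standalone sketch of that parsing, with a hypothetical helper name and sample inputs:
```
import json

def parse_mapping(mapping):
    # Try JSON first; fall back to space-separated 'src->dst' pairs.
    parsed = {}
    try:
        parsed.update(json.loads(mapping))
    except ValueError:
        for pair in mapping.split():
            k, v = pair.split('->')
            parsed.update({k: v})
    return parsed

print(parse_mapping('{"root_password": "login_password"}'))
print(parse_mapping('root_password->login_password port->login_port'))
# Both calls return a dict mapping emitter inputs to receiver inputs.
```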
    @main.command()
    @click.argument('emitter')
@@ -199,6 +225,9 @@ def init_cli_connect():
        click.echo(receiver)
        signals.disconnect(emitter, receiver)

        clients = signals.Connections.read_clients()
        show_emitter_connections(emitter.name, clients[emitter.name])


def init_cli_connections():
    @main.group()
@@ -212,29 +241,6 @@ def init_cli_connections():

    @connections.command()
    def show():
        def format_resource_input(resource_name, resource_input_name):
            return '{}::{}'.format(
                #click.style(resource_name, fg='white', bold=True),
                resource_name,
                click.style(resource_input_name, fg='yellow')
            )

        def show_emitter_connections(emitter_name, destinations):
            inputs = sorted(destinations)

            for emitter_input in inputs:
                click.echo(
                    '{} -> {}'.format(
                        format_resource_input(emitter_name, emitter_input),
                        '[{}]'.format(
                            ', '.join(
                                format_resource_input(*r)
                                for r in destinations[emitter_input]
                            )
                        )
                    )
                )

        clients = signals.Connections.read_clients()
        keys = sorted(clients)
        for emitter_name in keys:
@@ -291,51 +297,64 @@ def init_cli_resource():

    @resource.command()
    @click.argument('name')
    @click.argument('base_path')
    @click.argument('args')
    @click.argument('base_path', type=click.Path(exists=True, file_okay=False))
    @click.argument('args', nargs=-1)
    def create(args, base_path, name):
        args_parsed = {}

        click.echo('create {} {} {}'.format(name, base_path, args))
        args = json.loads(args) if args else {}
        resources = vr.create(name, base_path, args)
        for arg in args:
            try:
                args_parsed.update(json.loads(arg))
            except ValueError:
                k, v = arg.split('=')
                args_parsed.update({k: v})
        resources = vr.create(name, base_path, args_parsed)
        for res in resources:
            print res.name
            click.echo(res.color_repr())

    @resource.command()
    @click.option('--name', default=None)
    @click.option('--tag', default=None)
    @click.option('--use-json/--no-use-json', default=False)
    @click.option('--color/--no-color', default=True)
    def show(color, use_json, tag):
    @click.option('--json', default=False, is_flag=True)
    @click.option('--color', default=True, is_flag=True)
    def show(**kwargs):
        resources = []

        for name, res in sresource.load_all().items():
            show = True
            if tag:
                if tag not in res.tags:
            if kwargs['tag']:
                if kwargs['tag'] not in res.tags:
                    show = False
            if kwargs['name']:
                if res.name != kwargs['name']:
                    show = False

            if show:
                resources.append(res)

        if use_json:
        echo = click.echo_via_pager
        if kwargs['json']:
            output = json.dumps([r.to_dict() for r in resources], indent=2)
            echo = click.echo
        else:
            if color:
            if kwargs['color']:
                formatter = lambda r: r.color_repr()
            else:
                formatter = lambda r: unicode(r)
            output = '\n'.join(formatter(r) for r in resources)

        if output:
            click.echo_via_pager(output)
            echo(output)


    @resource.command()
    @click.argument('resource_path')
    @click.argument('resource_name')
    @click.argument('tag_name')
    @click.option('--add/--delete', default=True)
    def tag(add, tag_name, resource_path):
        click.echo('Tag {} with {} {}'.format(resource_path, tag_name, add))
        r = sresource.load(resource_path)
    def tag(add, tag_name, resource_name):
        click.echo('Tag {} with {} {}'.format(resource_name, tag_name, add))
        r = sresource.load(resource_name)
        if add:
            r.add_tag(tag_name)
        else:
@@ -344,22 +363,43 @@ def init_cli_resource():

    @resource.command()
    @click.argument('name')
    @click.argument('args')
    @click.argument('args', nargs=-1)
    def update(name, args):
        args = json.loads(args)
        click.echo('Updating resource {} with args {}'.format(name, args))
        args_parsed = {}
        for arg in args:
            try:
                args_parsed.update(json.loads(arg))
            except ValueError:
                k, v = arg.split('=')
                args_parsed.update({k: v})
        click.echo('Updating resource {} with args {}'.format(name, args_parsed))
        all = sresource.load_all()
        r = all[name]
        r.update(args)
        r.update(args_parsed)
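Both create and update now accept a variadic args list whose items may be JSON objects or key=value pairs. A minimal standalone sketch of that parsing (helper name and sample values are illustrative, not part of the diff):
```
import json

def parse_args(args):
    # Each arg may be a JSON object or a plain key=value pair.
    parsed = {}
    for arg in args:
        try:
            parsed.update(json.loads(arg))
        except ValueError:
            k, v = arg.split('=')
            parsed.update({k: v})
    return parsed

print(parse_args(['{"user_name": "keystone"}', 'user_password=keystone']))
# {'user_name': 'keystone', 'user_password': 'keystone'}
```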
    @resource.command()
    def validate():
    @click.option('--check-missing-connections', default=False, is_flag=True)
    def validate(check_missing_connections):
        errors = vr.validate_resources()
        for r, error in errors:
            print 'ERROR: %s: %s' % (r.name, error)
            click.echo('ERROR: %s: %s' % (r.name, error))

        if check_missing_connections:
            missing_connections = vr.find_missing_connections()
            if missing_connections:
                click.echo(
                    'The following resources have inputs of the same value '
                    'but are not connected:'
                )
                click.echo(
                    tabulate.tabulate([
                        ['%s::%s' % (r1, i1), '%s::%s' % (r2, i2)]
                        for r1, i1, r2, i2 in missing_connections
                    ])
                )

    @resource.command()
    @click.argument('path')
    @click.argument('path', type=click.Path(exists=True, dir_okay=False))
    def get_inputs(path):
        with open(path) as f:
            content = f.read()

@@ -3,7 +3,7 @@ import handlers


def resource_action(resource, action):
    handler = resource.metadata['handler']
    handler = resource.metadata.get('handler', 'none')
    with handlers.get(handler)([resource]) as h:
        return h.action(resource, action)


@@ -67,6 +67,8 @@ class Resource(object):
                    'Argument {} not implemented for resource {}'.format(k, self)
                )

            if isinstance(v, dict) and 'value' in v:
                v = v['value']
            self.metadata['input'][k]['value'] = v

        db.save(self.name, self.metadata, collection=db.COLLECTIONS.resource)
@@ -168,6 +170,15 @@ def wrap_resource(raw_resource):
    return Resource(name, raw_resource, args, tags=tags, virtual_resource=virtual_resource)


def wrap_resource_no_value(raw_resource):
    name = raw_resource['id']
    args = {k: v for k, v in raw_resource['input'].items()}
    tags = raw_resource.get('tags', [])
    virtual_resource = raw_resource.get('virtual_resource', [])

    return Resource(name, raw_resource, args, tags=tags, virtual_resource=virtual_resource)


def load(resource_name):
    raw_resource = db.read(resource_name, collection=db.COLLECTIONS.resource)


@@ -38,6 +38,7 @@ def create_resource(name, base_path, args, virtual_resource=None):
    resource = resource_module.Resource(name, metadata, args, tags, virtual_resource)
    return resource


def create_virtual_resource(vr_name, template):
    resources = template['resources']
    connections = []
@@ -63,6 +64,7 @@ def create_virtual_resource(vr_name, template):

    return created_resources


def create(name, base_path, kwargs, virtual_resource=None):
    if isinstance(base_path, resource_provider.BaseProvider):
        base_path = base_path.directory
@@ -81,6 +83,7 @@ def create(name, base_path, kwargs, virtual_resource=None):

    return resources


def validate_resources():
    db = resource_module.load_all()
    all_errors = []
@@ -93,6 +96,62 @@ def validate_resources():
        all_errors.append((r, errors))
    return all_errors


def find_inputs_without_source():
    """Find resources and inputs whose values are hardcoded.

    :return: [(resource_name, input_name)]
    """
    resources = resource_module.load_all()

    ret = set([(r.name, input_name) for r in resources.values()
               for input_name in r.args])

    clients = signals.Connections.read_clients()

    for dest_dict in clients.values():
        for destinations in dest_dict.values():
            for receiver_name, receiver_input in destinations:
                try:
                    ret.remove((receiver_name, receiver_input))
                except KeyError:
                    continue

    return list(ret)


def find_missing_connections():
    """Find resources whose input values are duplicated

    but are not connected to each other (i.e. the values
    are hardcoded, not coming from a connection).

    NOTE: we could have 2 inputs of the same value living in 2 "circles".
    This case is not covered; we find only inputs whose value is hardcoded.

    :return: [(resource_name1, input_name1, resource_name2, input_name2)]
    """
    ret = set()

    resources = resource_module.load_all()

    inputs_without_source = find_inputs_without_source()

    for resource1, input1 in inputs_without_source:
        r1 = resources[resource1]
        v1 = r1.args[input1]

        for resource2, input2 in inputs_without_source:
            r2 = resources[resource2]
            v2 = r2.args[input2]

            if v1 == v2 and resource1 != resource2 and \
                    (resource2, input2, resource1, input1) not in ret:
                ret.add((resource1, input1, resource2, input2))

    return list(ret)
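To make the intent of find_missing_connections concrete, here is a hedged, self-contained sketch of the same duplicate-value check over plain dicts; resource names and values are made up, whereas the real function reads resources and connections from the database:
```
# Hardcoded inputs per resource (resource -> {input: value}); hypothetical data.
args = {
    'mariadb_service': {'root_password': 'mariadb', 'port': 3306},
    'keystone_db': {'login_password': 'mariadb', 'login_port': 3306},
}

missing = set()
for r1, inputs1 in args.items():
    for i1, v1 in inputs1.items():
        for r2, inputs2 in args.items():
            for i2, v2 in inputs2.items():
                if r1 != r2 and v1 == v2 and (r2, i2, r1, i1) not in missing:
                    missing.add((r1, i1, r2, i2))

print(sorted(missing))
# Reports e.g. ('mariadb_service', 'root_password', 'keystone_db', 'login_password'),
# i.e. equal values that should probably come from a connection instead.
```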
def _compile_file(name, path, kwargs):
    with open(path) as f:
        content = f.read()
@@ -101,11 +160,13 @@ def _compile_file(name, path, kwargs):
    template = _get_template(name, content, kwargs, inputs)
    return template


def get_inputs(content):
    env = Environment()
    ast = env.parse(content)
    return meta.find_undeclared_variables(ast)


def _get_template(name, content, kwargs, inputs):
    missing = []
    for input in inputs:
@@ -117,5 +178,7 @@ def _get_template(name, content, kwargs, inputs):
    template = template.render(str=str, zip=zip, **kwargs)
    return template


def is_virtual(path):
    return os.path.isfile(path)


@@ -114,6 +114,8 @@ def commit(li, resources, commited, history):
            commited[li.res]['metadata'])
        result_state = execute(commited_res, 'remove')

        staged_res.set_args_from_dict(staged_data['input'])

        if result_state is state.STATES.success:
            result_state = execute(staged_res, 'run')
    else:
@@ -4,7 +4,7 @@ import tempfile
import unittest
import yaml

from solar.core import resource as xr
from solar.core import virtual_resource as vr
from solar.core import signals as xs
from solar.interfaces.db import get_db

@@ -31,4 +31,4 @@ class BaseResourceTest(unittest.TestCase):
        return path

    def create_resource(self, name, src, args):
        return xr.create(name, src, args)
        return vr.create(name, src, args)[0]

@@ -15,7 +15,8 @@ def default_resources():
        {'id': 'node1',
         'input': {'ip': {'value':'10.0.0.3'}}})
    rabbitmq_service1 = resource.wrap_resource(
        {'id':'rabbitmq', 'input': {
        {'id':'rabbitmq',
         'input': {
             'ip' : {'value': ''},
             'image': {'value': 'rabbitmq:3-management'}}})
    signals.connect(node1, rabbitmq_service1)

@@ -11,7 +11,8 @@ def resources():
        {'id': 'node1',
         'input': {'ip': {'value': '10.0.0.3'}}})
    mariadb_service1 = resource.wrap_resource(
        {'id': 'mariadb', 'input': {
        {'id': 'mariadb',
         'input': {
             'port' : {'value': 3306},
             'ip': {'value': ''}}})
    keystone_db = resource.wrap_resource(
@@ -50,29 +51,85 @@ def test_update_port_on_mariadb(resources):
        ('change', u'metadata.input.login_port.value', (3306, 4400))]


@pytest.fixture
def simple_input():
    res1 = resource.wrap_resource(
        {'id': 'res1',
         'input': {'ip': {'value': '10.10.0.2'}}})
    res2 = resource.wrap_resource(
        {'id': 'res2',
         'input': {'ip': {'value': '10.10.0.3'}}})

    signals.connect(res1, res2)
    return resource.load_all()


def test_update_simple_resource(simple_input):
    operations.stage_changes()
    operations.commit_changes()

    res1 = simple_input['res1']
    res1.update({'ip': '10.0.0.3'})

    log = operations.stage_changes()

    assert len(log) == 2

    assert log.items[0].diff == [
        ('change', u'input.ip.value', ('10.10.0.2', '10.0.0.3')),
        ('change', 'metadata.input.ip.value', ('10.10.0.2', '10.0.0.3')),
    ]
    assert log.items[1].diff == [
        ('change', u'input.ip.value', ('10.10.0.2', '10.0.0.3')),
        ('change', 'metadata.input.ip.value', ('10.10.0.2', '10.0.0.3')),
    ]

    operations.commit_changes()
    assert simple_input['res1'].args_dict() == {
        'ip': '10.0.0.3',
    }
    assert simple_input['res2'].args_dict() == {
        'ip': '10.0.0.3',
    }

    log_item = operations.rollback_last()
    assert log_item.diff == [
        ('change', u'input.ip.value', (u'10.0.0.3', u'10.10.0.2')),
        ('change', 'metadata.input.ip.value', ('10.0.0.3', '10.10.0.2')),
    ]

    res2 = resource.load('res2')
    assert res2.args_dict() == {
        'ip': '10.10.0.2',
    }


@pytest.fixture
def list_input():
    res1 = resource.wrap_resource(
        {'id': 'res1', 'input': {'ip': {'value': '10.10.0.2'}}})
        {'id': 'res1',
         'input': {'ip': {'value': '10.10.0.2'}}})
    res2 = resource.wrap_resource(
        {'id': 'res2', 'input': {'ip': {'value': '10.10.0.3'}}})
        {'id': 'res2',
         'input': {'ip': {'value': '10.10.0.3'}}})
    consumer = resource.wrap_resource(
        {'id': 'consumer', 'input':
            {'ips': {'value': [],
                     'schema': ['str']}}})
        {'id': 'consumer',
         'input':
            {'ips': {'value': [],
                     'schema': ['str']}}})

    signals.connect(res1, consumer, {'ip': 'ips'})
    signals.connect(res2, consumer, {'ip': 'ips'})
    return resource.load_all()


@pytest.mark.xfail
def test_update_list_resource(list_input):
    operations.stage_changes()
    operations.commit_changes()

    res3 = resource.wrap_resource(
        {'id': 'res3', 'input': {'ip': {'value': '10.10.0.4'}}})
        {'id': 'res3',
         'input': {'ip': {'value': '10.10.0.4'}}})
    signals.connect(res3, list_input['consumer'], {'ip': 'ips'})

    log = operations.stage_changes()
@@ -110,5 +167,3 @@ def test_update_list_resource(list_input):
        {u'emitter': u'ip',
         u'emitter_attached_to': u'res2',
         u'value': u'10.10.0.3'}]}