Clean imports in code

This patch set modifies lines that import objects instead of modules.
Per the OpenStack import guidelines, a file should import modules, not
objects.

http://docs.openstack.org/developer/hacking/#imports

Change-Id: I5271d5d3b5ee0242a80c4d37a9297b910350796d
Cao Xuan Hoang 2016-08-26 11:15:54 +07:00
parent 4ecf62ca87
commit c81cd05fc9
8 changed files with 72 additions and 71 deletions
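For illustration only (not part of the patch), a minimal sketch of the convention the guideline above asks for, using congress.dse2.data_service as in the hunks below:

    # Discouraged: importing an object (here, a class) out of its module
    from congress.dse2.data_service import DataService

    svc = DataService('svc1')

    # Preferred: import the module and qualify the object at the call site
    from congress.dse2 import data_service

    svc = data_service.DataService('svc1')

Keeping the owning module visible at each call site makes it clearer where a name comes from and avoids collisions between identically named objects, which is the usual rationale for the rule.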


@@ -18,15 +18,15 @@ from __future__ import absolute_import
from oslo_log import log as logging
from congress.dse2.data_service import DataService
from congress.dse2 import data_service
LOG = logging.getLogger(__name__)
class deepSix(DataService):
class deepSix(data_service.DataService):
"""A placeholder while we transition to the new arch."""
def __init__(self, name, keys, inbox=None, dataPath=None):
DataService.__init__(self, name)
data_service.DataService.__init__(self, name)
self.name = name
def log_info(self, msg, *args):


@@ -35,7 +35,7 @@ from congress.datasources import constants
from congress.db import api as db
from congress.db import datasources as datasources_db
from congress.db import db_ds_table_data
from congress.dse2.control_bus import DseNodeControlBus
from congress.dse2 import control_bus
from congress import exception
@@ -131,7 +131,7 @@ class DseNode(object):
self.subscriptions = {}
# Note(ekcs): A little strange that _control_bus starts before self?
self._control_bus = DseNodeControlBus(self)
self._control_bus = control_bus.DseNodeControlBus(self)
self.register_service(self._control_bus)
# load configured drivers
self.loaded_drivers = self.load_drivers()


@@ -19,8 +19,8 @@ import sys
from oslo_config import cfg
from oslo_messaging import conffixture
from congress.dse2.data_service import DataService
from congress.dse2.dse_node import DseNode
from congress.dse2 import data_service
from congress.dse2 import dse_node
from congress.tests import base
@@ -58,11 +58,11 @@ class TestControlBus(base.TestCase):
def _create_node_with_services(num):
nid = 'cbd_node%s' % num
nodes.append(DseNode(self.messaging_config, nid, []))
nodes.append(dse_node.DseNode(self.messaging_config, nid, []))
ns = []
for s in range(num):
# intentionally starting different number services
ns.append(DataService('cbd-%d_svc-%d' % (num, s)))
ns.append(data_service.DataService('cbd-%d_svc-%d' % (num, s)))
nodes[-1].register_service(ns[-1])
services.append(ns)
return nodes[-1]


@@ -40,7 +40,7 @@ from congress.api.system import driver_model
from congress.api import table_model
from congress.db import datasources as db_datasources
from congress import exception
from congress.policy_engines.agnostic import Dse2Runtime
from congress.policy_engines import agnostic
LOG = logging.getLogger(__name__)
@@ -126,7 +126,7 @@ def create_api_models(bus):
def create_policy_engine():
"""Create policy engine and initialize it using the api models."""
engine = Dse2Runtime(ENGINE_SERVICE_NAME)
engine = agnostic.Dse2Runtime(ENGINE_SERVICE_NAME)
engine.debug_mode() # should take this out for production
return engine


@@ -17,8 +17,7 @@ import json
import mock
import time
from congress.dse2.data_service import DataService
from congress.dse2.data_service import DataServiceInfo
from congress.dse2 import data_service
from congress.tests import base
@@ -29,15 +28,15 @@ class TestDataServiceInfo(base.TestCase):
'rpc_endpoints_info': ['call1', 'call2']}
def test_from_json(self):
s = DataServiceInfo.from_json(json.dumps(self.TESTDATA))
for a in DataServiceInfo.MARSHALL_ATTRS:
s = data_service.DataServiceInfo.from_json(json.dumps(self.TESTDATA))
for a in data_service.DataServiceInfo.MARSHALL_ATTRS:
self.assertEqual(getattr(s, a), self.TESTDATA[a],
"Attr '%s' set properly in from_dict" % a)
self.assertRaises(KeyError, DataServiceInfo.from_json,
self.assertRaises(KeyError, data_service.DataServiceInfo.from_json,
'{"bad_attr": 123}')
def test_to_json(self):
s = DataServiceInfo(**self.TESTDATA)
s = data_service.DataServiceInfo(**self.TESTDATA)
self.assertEqual(json.loads(s.to_json()), self.TESTDATA,
'JSON representation matches constructed data')
s.last_hb_time = time.time()
@@ -45,15 +44,15 @@ class TestDataServiceInfo(base.TestCase):
'JSON representation ignores last_hb_time')
def test_from_dict(self):
s = DataServiceInfo.from_dict(self.TESTDATA)
for a in DataServiceInfo.MARSHALL_ATTRS:
s = data_service.DataServiceInfo.from_dict(self.TESTDATA)
for a in data_service.DataServiceInfo.MARSHALL_ATTRS:
self.assertEqual(getattr(s, a), self.TESTDATA[a],
"Attr '%s' set properly in from_dict" % a)
self.assertRaises(KeyError, DataServiceInfo.from_dict,
self.assertRaises(KeyError, data_service.DataServiceInfo.from_dict,
{'bad_attr': 123})
def test_to_dict(self):
s = DataServiceInfo(**self.TESTDATA)
s = data_service.DataServiceInfo(**self.TESTDATA)
self.assertEqual(s.to_dict(), self.TESTDATA,
'dict representation matches constructed data')
s.last_hb_time = time.time()
@@ -64,7 +63,7 @@ class TestDataServiceInfo(base.TestCase):
class TestDataService(base.TestCase):
def test_info(self):
ds = DataService("svc1")
ds = data_service.DataService("svc1")
node = mock.MagicMock()
node.node_id = 'testnode'
ds.node = node
@@ -76,7 +75,7 @@ class TestDataService(base.TestCase):
self.assertEqual(info.rpc_endpoints_info, [])
def test_start_stop(self):
ds = DataService("svc1")
ds = data_service.DataService("svc1")
ds.node = mock.MagicMock()
ds._rpc_server = mock.MagicMock()
self.assertEqual(ds._running, False,


@@ -22,11 +22,11 @@ cfg.CONF.distributed_architecture = True
from oslo_messaging import conffixture
from congress.datalog import compile
from congress.datasources.nova_driver import NovaDriver
from congress.datasources import nova_driver
from congress import exception as congressException
from congress.policy_engines.agnostic import Dse2Runtime
from congress.policy_engines import agnostic
from congress.tests import base
from congress.tests.fake_datasource import FakeDataSource
from congress.tests import fake_datasource
from congress.tests import helper
@@ -41,8 +41,8 @@ class TestDSE(base.TestCase):
def test_intranode_pubsub(self):
node = helper.make_dsenode_new_partition('testnode')
test1 = FakeDataSource('test1')
test2 = FakeDataSource('test2')
test1 = fake_datasource.FakeDataSource('test1')
test2 = fake_datasource.FakeDataSource('test2')
node.register_service(test1)
node.register_service(test2)
@@ -58,8 +58,8 @@ class TestDSE(base.TestCase):
# same as test_intranode_pubsub but with opposite ordering.
# (Ordering does matter with internode_pubsub).
node = helper.make_dsenode_new_partition('testnode')
test1 = FakeDataSource('test1')
test2 = FakeDataSource('test2')
test1 = fake_datasource.FakeDataSource('test1')
test2 = fake_datasource.FakeDataSource('test2')
node.register_service(test1)
node.register_service(test2)
@@ -73,8 +73,8 @@ class TestDSE(base.TestCase):
def test_intranode_partial_unsub(self):
node = helper.make_dsenode_new_partition('testnode')
test1 = FakeDataSource('test1')
test2 = FakeDataSource('test2')
test1 = fake_datasource.FakeDataSource('test1')
test2 = fake_datasource.FakeDataSource('test2')
node.register_service(test1)
node.register_service(test2)
@@ -90,10 +90,10 @@ class TestDSE(base.TestCase):
def test_internode_pubsub(self):
node1 = helper.make_dsenode_new_partition('testnode1')
test1 = FakeDataSource('test1')
test1 = fake_datasource.FakeDataSource('test1')
node1.register_service(test1)
node2 = helper.make_dsenode_same_partition(node1, 'testnode2')
test2 = FakeDataSource('test2')
test2 = fake_datasource.FakeDataSource('test2')
node2.register_service(test2)
test1.subscribe('test2', 'p')
@@ -107,8 +107,8 @@ class TestDSE(base.TestCase):
def test_internode_partial_unsub(self):
node1 = helper.make_dsenode_new_partition('testnode1')
node2 = helper.make_dsenode_same_partition(node1, 'testnode2')
test1 = FakeDataSource('test1')
test2 = FakeDataSource('test2')
test1 = fake_datasource.FakeDataSource('test1')
test2 = fake_datasource.FakeDataSource('test2')
node1.register_service(test1)
node2.register_service(test2)
@@ -124,12 +124,12 @@ class TestDSE(base.TestCase):
def test_multiservice_pubsub(self):
node1 = helper.make_dsenode_new_partition('testnode1')
test1 = FakeDataSource('test1')
test2 = FakeDataSource('test2')
test1 = fake_datasource.FakeDataSource('test1')
test2 = fake_datasource.FakeDataSource('test2')
node1.register_service(test1)
node1.register_service(test2)
node2 = helper.make_dsenode_same_partition(node1, 'testnode2')
test3 = FakeDataSource('test3')
test3 = fake_datasource.FakeDataSource('test3')
node2.register_service(test3)
test1.subscribe('test3', 'p')
@@ -143,8 +143,8 @@ class TestDSE(base.TestCase):
def test_subscribe_snapshot(self):
node = helper.make_dsenode_new_partition('testnode')
test1 = FakeDataSource('test1')
test2 = FakeDataSource('test2')
test1 = fake_datasource.FakeDataSource('test1')
test2 = fake_datasource.FakeDataSource('test2')
node.register_service(test1)
node.register_service(test2)
@@ -158,9 +158,9 @@ class TestDSE(base.TestCase):
nova_client = mock.MagicMock()
with mock.patch.object(novaclient.client.Client, '__init__',
return_value=nova_client):
nova = NovaDriver(
nova = nova_driver.NovaDriver(
name='nova', args=helper.datasource_openstack_args())
test = FakeDataSource('test')
test = fake_datasource.FakeDataSource('test')
node.register_service(nova)
node.register_service(test)
@@ -177,9 +177,9 @@ class TestDSE(base.TestCase):
nova_client = mock.MagicMock()
with mock.patch.object(novaclient.client.Client, '__init__',
return_value=nova_client):
nova = NovaDriver(
nova = nova_driver.NovaDriver(
name='nova', args=helper.datasource_openstack_args())
test = FakeDataSource('test')
test = fake_datasource.FakeDataSource('test')
node.register_service(nova)
node.register_service(test)
@@ -201,9 +201,9 @@ class TestDSE(base.TestCase):
nova_client = mock.MagicMock()
with mock.patch.object(novaclient.client.Client, '__init__',
return_value=nova_client):
nova = NovaDriver(
nova = nova_driver.NovaDriver(
name='nova', args=helper.datasource_openstack_args())
test = FakeDataSource('test')
test = fake_datasource.FakeDataSource('test')
node.register_service(nova)
node.register_service(test)
@@ -218,8 +218,8 @@ class TestDSE(base.TestCase):
def test_datasource_poll(self):
node = helper.make_dsenode_new_partition('testnode')
node.always_snapshot = True # Note(ekcs): this test expects snapshot
pub = FakeDataSource('pub')
sub = FakeDataSource('sub')
pub = fake_datasource.FakeDataSource('pub')
sub = fake_datasource.FakeDataSource('sub')
node.register_service(pub)
node.register_service(sub)
@@ -234,8 +234,8 @@ class TestDSE(base.TestCase):
"""Test policy correctly processes initial data snapshot."""
node = helper.make_dsenode_new_partition('testnode')
node.always_snapshot = False
data = FakeDataSource('data')
engine = Dse2Runtime('engine')
data = fake_datasource.FakeDataSource('data')
engine = agnostic.Dse2Runtime('engine')
node.register_service(data)
node.register_service(engine)
@@ -252,8 +252,8 @@ class TestDSE(base.TestCase):
"""Test policy correctly processes initial data snapshot and update."""
node = helper.make_dsenode_new_partition('testnode')
node.always_snapshot = False
data = FakeDataSource('data')
engine = Dse2Runtime('engine')
data = fake_datasource.FakeDataSource('data')
engine = agnostic.Dse2Runtime('engine')
node.register_service(data)
node.register_service(engine)
@@ -274,8 +274,8 @@ class TestDSE(base.TestCase):
"""Test policy correctly processes data on late subscribe."""
node = helper.make_dsenode_new_partition('testnode')
node.always_snapshot = False
data = FakeDataSource('data')
engine = Dse2Runtime('engine')
data = fake_datasource.FakeDataSource('data')
engine = agnostic.Dse2Runtime('engine')
node.register_service(data)
node.register_service(engine)
@@ -302,7 +302,7 @@ class TestDSE(base.TestCase):
def test_unregister(self):
node = helper.make_dsenode_new_partition('testnode')
test1 = FakeDataSource('test1')
test1 = fake_datasource.FakeDataSource('test1')
node.register_service(test1)
obj = node.invoke_service_rpc(
'test1', 'get_status', {'source_id': None, 'params': None})
@@ -319,7 +319,8 @@ class TestDSE(base.TestCase):
ns = []
for s in range(num):
# intentionally starting different number services
ns.append(FakeDataSource('cbd-%d_svc-%d' % (num, s)))
ns.append(
fake_datasource.FakeDataSource('cbd-%d_svc-%d' % (num, s)))
nodes[-1].register_service(ns[-1])
services.append(ns)
return nodes[-1]
@@ -403,9 +404,9 @@ class TestDSE(base.TestCase):
publish.
"""
node = helper.make_dsenode_new_partition('testnode')
data = FakeDataSource('data')
policy = Dse2Runtime('policy')
policy2 = Dse2Runtime('policy2')
data = fake_datasource.FakeDataSource('data')
policy = agnostic.Dse2Runtime('policy')
policy2 = agnostic.Dse2Runtime('policy2')
node.register_service(data)
node.register_service(policy)
node.register_service(policy2)
@@ -458,11 +459,11 @@ class TestDSE(base.TestCase):
"""Test correct local leader behavior with 2 PEs requesting exec"""
node1 = helper.make_dsenode_new_partition('testnode1')
node2 = helper.make_dsenode_same_partition(node1, 'testnode2')
dsd = FakeDataSource('dsd')
dsd = fake_datasource.FakeDataSource('dsd')
# faster time-out for testing
dsd.LEADER_TIMEOUT = 2
pe1 = Dse2Runtime('pe1')
pe2 = Dse2Runtime('pe2')
pe1 = agnostic.Dse2Runtime('pe1')
pe2 = agnostic.Dse2Runtime('pe2')
node1.register_service(pe1)
node2.register_service(pe2)
node1.register_service(dsd)


@@ -18,7 +18,7 @@ import eventlet
from oslo_config import cfg
from oslo_messaging import conffixture
from congress.dse2.data_service import DataService
from congress.dse2 import data_service
from congress.tests import base
from congress.tests import helper
@@ -51,7 +51,7 @@ class _PingRpcEndpoint(object):
return args
class _PingRpcService(DataService):
class _PingRpcService(data_service.DataService):
def __init__(self, service_id, node_id):
self.endpoints = [_PingRpcEndpoint(node_id)]
super(_PingRpcService, self).__init__(service_id)
@@ -79,7 +79,7 @@ class TestDseNode(base.TestCase):
self.messaging_config, [])
services = []
for i in range(2):
service = DataService('test-service-%s' % i)
service = data_service.DataService('test-service-%s' % i)
node.register_service(service)
services.append(service)
for s in node.get_services(True):


@@ -30,7 +30,7 @@ from congress.datalog import compile
from congress.datalog import unify
from congress.policy_engines import agnostic
from congress.dse2.dse_node import DseNode
from congress.dse2 import dse_node
LOG = logging.getLogger(__name__)
@@ -50,8 +50,8 @@ def make_dsenode_new_partition(node_id,
"""Get new DseNode in it's own new DSE partition."""
messaging_config = messaging_config or generate_messaging_config()
node_rpc_endpoints = node_rpc_endpoints or []
return DseNode(messaging_config, node_id, node_rpc_endpoints,
partition_id=get_new_partition())
return dse_node.DseNode(messaging_config, node_id, node_rpc_endpoints,
partition_id=get_new_partition())
def make_dsenode_same_partition(existing,
@@ -60,11 +60,12 @@ def make_dsenode_same_partition(existing,
node_rpc_endpoints=None):
"""Get new DseNode in the same DSE partition as existing (node or part)."""
partition_id = (existing.partition_id if
isinstance(existing, DseNode) else existing)
isinstance(existing, dse_node.DseNode) else existing)
messaging_config = messaging_config or generate_messaging_config()
node_rpc_endpoints = node_rpc_endpoints or []
return DseNode(messaging_config, node_id, node_rpc_endpoints, partition_id)
return dse_node.DseNode(
messaging_config, node_id, node_rpc_endpoints, partition_id)
def get_new_partition():