Merge pull request #170 from rackerlabs/stable-3
Promoting Stable-3 To Stable
etc/sample_reconciler_config.json (new file, 13 lines)
@@ -0,0 +1,13 @@
{
    "client_class": "JSONBridgeClient",
    "client": {
        "url": "http://jsonbridge.example.com:8080/query/",
        "username": "bridgeuser",
        "password": "super_secure_password",
        "databases": {
            "RegionOne": "nova-regionone",
            "RegionTwo": "nova-regiontwo"
        }
    },
    "region_mapping_loc": "etc/sample_region_mapping.json"
}
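The sample config above is what the reconciler expects at startup. As a minimal sketch (not part of this commit; the path and variable names are illustrative), loading it mirrors what the audit script changed later in this diff does when --reconcile is passed:

import json

from stacktach.reconciler import Reconciler

# Path is illustrative; deployments point --reconciler_config at
# /etc/stacktach/reconciler-config.json (the default added later in this diff).
with open('etc/sample_reconciler_config.json') as f:
    reconciler_config = json.load(f)

# Reconciler picks its client class and region mapping from this config.
reconciler = Reconciler(reconciler_config)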
etc/sample_region_mapping.json (new file, 6 lines)
@@ -0,0 +1,6 @@
{
    "RegionOne.dev.global": "RegionOne",
    "RegionOne.dev.cell1": "RegionOne",
    "RegionTwo.dev.global": "RegionTwo",
    "RegionTwo.dev.cell1": "RegionTwo"
}
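This mapping translates StackTach deployment (cell) names into the region keys used by the client's "databases" section. A minimal sketch of the lookup, mirroring Reconciler._region_for_usage() added later in this diff (the standalone function is illustrative, not part of the commit):

import json

def region_for_deployment(deployment_name,
                          mapping_loc='etc/sample_region_mapping.json'):
    # Mirrors Reconciler._region_for_usage(): unmapped deployments yield False.
    with open(mapping_loc) as f:
        region_mapping = json.load(f)
    return region_mapping.get(deployment_name, False)

print(region_for_deployment('RegionOne.dev.cell1'))  # RegionOne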
@@ -30,12 +30,26 @@ from django.db.models import F
from stacktach import datetime_to_decimal as dt
from stacktach import models
from stacktach.reconciler import Reconciler

OLD_LAUNCHES_QUERY = "select * from stacktach_instanceusage " \
                     "where launched_at is not null and " \
                     "launched_at < %s and instance not in " \
                     "(select distinct(instance) " \
                     "from stacktach_instancedeletes where deleted_at < %s)"
OLD_LAUNCHES_QUERY = """
select stacktach_instanceusage.id,
       stacktach_instanceusage.instance,
       stacktach_instanceusage.launched_at from stacktach_instanceusage
    left outer join stacktach_instancedeletes on
        stacktach_instanceusage.instance = stacktach_instancedeletes.instance
    left outer join stacktach_instancereconcile on
        stacktach_instanceusage.instance = stacktach_instancereconcile.instance
    where (
        stacktach_instancereconcile.deleted_at is null and (
            stacktach_instancedeletes.deleted_at is null or
            stacktach_instancedeletes.deleted_at > %s
        )
        or (stacktach_instancereconcile.deleted_at is not null and
            stacktach_instancereconcile.deleted_at > %s)
    ) and stacktach_instanceusage.launched_at < %s;"""

reconciler = None


def _get_new_launches(beginning, ending):
@@ -63,35 +77,45 @@ def _get_exists(beginning, ending):
    return models.InstanceExists.objects.filter(**filters)


def _audit_launches_to_exists(launches, exists):
def _audit_launches_to_exists(launches, exists, beginning):
    fails = []
    for (instance, launches) in launches.items():
        if instance in exists:
            for launch1 in launches:
            for expected in launches:
                found = False
                for launch2 in exists[instance]:
                    if int(launch1['launched_at']) == int(launch2['launched_at']):
                for actual in exists[instance]:
                    if int(expected['launched_at']) == \
                            int(actual['launched_at']):
                        # HACK (apmelton): Truncate the decimal because we may not
                        # have the milliseconds.
                        found = True

                if not found:
                    rec = False
                    if reconciler:
                        args = (expected['id'], beginning)
                        rec = reconciler.missing_exists_for_instance(*args)
                    msg = "Couldn't find exists for launch (%s, %s)"
                    msg = msg % (instance, launch1['launched_at'])
                    fails.append(['Launch', launch1['id'], msg])
                    msg = msg % (instance, expected['launched_at'])
                    fails.append(['Launch', expected['id'], msg, 'Y' if rec else 'N'])
        else:
            rec = False
            if reconciler:
                args = (launches[0]['id'], beginning)
                rec = reconciler.missing_exists_for_instance(*args)
            msg = "No exists for instance (%s)" % instance
            fails.append(['Launch', '-', msg])
            fails.append(['Launch', '-', msg, 'Y' if rec else 'N'])
    return fails


def _status_queries(exists_query):
    verified = exists_query.filter(status=models.InstanceExists.VERIFIED)
    reconciled = exists_query.filter(status=models.InstanceExists.RECONCILED)
    fail = exists_query.filter(status=models.InstanceExists.FAILED)
    pending = exists_query.filter(status=models.InstanceExists.PENDING)
    verifying = exists_query.filter(status=models.InstanceExists.VERIFYING)

    return verified, fail, pending, verifying
    return verified, reconciled, fail, pending, verifying


def _send_status_queries(exists_query):
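The HACK comment above is why launches and exists are compared through int(): an exists record may carry launched_at without the sub-second part. A small illustration, with made-up timestamps (not part of this commit):

from decimal import Decimal

# A launch recorded with microseconds still matches an exists record that
# only kept whole seconds once both are truncated.
launch_launched_at = Decimal('1361440000.123456')
exists_launched_at = Decimal('1361440000')
assert int(launch_launched_at) == int(exists_launched_at)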
@@ -108,7 +132,8 @@ def _send_status_queries(exists_query):
def _audit_for_exists(exists_query):
    (verified, fail, pending, verifying) = _status_queries(exists_query)
    (verified, reconciled,
     fail, pending, verifying) = _status_queries(exists_query)

    (success, unsent, redirect,
     client_error, server_error) = _send_status_queries(verified)
@@ -116,6 +141,7 @@ def _audit_for_exists(exists_query):
    report = {
        'count': exists_query.count(),
        'verified': verified.count(),
        'reconciled': reconciled.count(),
        'failed': fail.count(),
        'pending': pending.count(),
        'verifying': verifying.count(),
@@ -175,8 +201,13 @@ def _launch_audit_for_period(beginning, ending):
        else:
            launches_dict[instance] = [l, ]

    old_launches = models.InstanceUsage.objects.raw(OLD_LAUNCHES_QUERY,
                                                    [beginning, beginning])
    # NOTE (apmelton)
    # Django's safe substitution doesn't allow dict substitution...
    # Thus, we send it 'beginning' three times...
    old_launches = models.InstanceUsage.objects\
        .raw(OLD_LAUNCHES_QUERY,
             [beginning, beginning, beginning])

    old_launches_dict = {}
    for launch in old_launches:
        instance = launch.instance
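The NOTE above is the reason 'beginning' appears three times: raw() here takes positional parameters, one per %s in the rewritten OLD_LAUNCHES_QUERY. A sketch of the binding as it runs inside the audit script (the timestamp is illustrative, and OLD_LAUNCHES_QUERY is the module-level string defined earlier in this diff):

from stacktach import models

beginning = 1361440000.0  # decimal unix timestamp for the period start
params = [
    beginning,  # stacktach_instancedeletes.deleted_at > %s
    beginning,  # stacktach_instancereconcile.deleted_at > %s
    beginning,  # stacktach_instanceusage.launched_at < %s
]
old_launches = models.InstanceUsage.objects.raw(OLD_LAUNCHES_QUERY, params)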
@@ -205,7 +236,8 @@ def _launch_audit_for_period(beginning, ending):
            exists_dict[instance] = [e, ]

    launch_to_exists_fails = _audit_launches_to_exists(launches_dict,
                                                       exists_dict)
                                                       exists_dict,
                                                       beginning)

    return launch_to_exists_fails, new_launches.count(), len(old_launches_dict)
@@ -222,11 +254,11 @@ def audit_for_period(beginning, ending):
    summary = {
        'verifier': verify_summary,
        'launch_fails': {
            'total_failures': len(detail),
        'launch_summary': {
            'new_launches': new_count,
            'old_launches': old_count
            }
            'old_launches': old_count,
            'failures': len(detail)
        },
    }

    details = {
@@ -266,7 +298,7 @@ def store_results(start, end, summary, details):
        'created': dt.dt_to_decimal(datetime.datetime.utcnow()),
        'period_start': start,
        'period_end': end,
        'version': 2,
        'version': 4,
        'name': 'nova usage audit'
    }
@@ -276,7 +308,7 @@ def store_results(start, end, summary, details):
def make_json_report(summary, details):
    report = [{'summary': summary},
              ['Object', 'ID', 'Error Description']]
              ['Object', 'ID', 'Error Description', 'Reconciled?']]
    report.extend(details['exist_fails'])
    report.extend(details['launch_fails'])
    return json.dumps(report)
@@ -302,8 +334,20 @@ if __name__ == '__main__':
                        help="If set to true, report will be stored. "
                             "Otherwise, it will just be printed",
                        type=bool, default=False)
    parser.add_argument('--reconcile',
                        help="Enabled reconciliation",
                        type=bool, default=False)
    parser.add_argument('--reconciler_config',
                        help="Location of the reconciler config file",
                        type=str,
                        default='/etc/stacktach/reconciler-config.json')
    args = parser.parse_args()

    if args.reconcile:
        with open(args.reconciler_config) as f:
            reconciler_config = json.load(f)
        reconciler = Reconciler(reconciler_config)

    if args.utcdatetime is not None:
        time = args.utcdatetime
    else:
stacktach/migrations/0004_create_instancereconcile.py (new file, 160 lines)
@@ -0,0 +1,160 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import datetime
|
||||
from south.db import db
|
||||
from south.v2 import SchemaMigration
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(SchemaMigration):
|
||||
|
||||
def forwards(self, orm):
|
||||
# Adding model 'InstanceReconcile'
|
||||
db.create_table(u'stacktach_instancereconcile', (
|
||||
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
|
||||
('row_created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
|
||||
('row_updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
|
||||
('instance', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
|
||||
('launched_at', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=6, db_index=True)),
|
||||
('deleted_at', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=6, db_index=True)),
|
||||
('instance_type_id', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
|
||||
('source', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=150, null=True, blank=True)),
|
||||
))
|
||||
db.send_create_signal(u'stacktach', ['InstanceReconcile'])
|
||||
|
||||
|
||||
def backwards(self, orm):
|
||||
# Deleting model 'InstanceReconcile'
|
||||
db.delete_table(u'stacktach_instancereconcile')
|
||||
|
||||
|
||||
models = {
|
||||
u'stacktach.deployment': {
|
||||
'Meta': {'object_name': 'Deployment'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
|
||||
},
|
||||
u'stacktach.instancedeletes': {
|
||||
'Meta': {'object_name': 'InstanceDeletes'},
|
||||
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'})
|
||||
},
|
||||
u'stacktach.instanceexists': {
|
||||
'Meta': {'object_name': 'InstanceExists'},
|
||||
'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceDeletes']"}),
|
||||
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'fail_reason': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '300', 'null': 'True', 'blank': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
|
||||
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
|
||||
'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
|
||||
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceUsage']"})
|
||||
},
|
||||
u'stacktach.instancereconcile': {
|
||||
'Meta': {'object_name': 'InstanceReconcile'},
|
||||
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'row_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
|
||||
'row_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
|
||||
'source': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '150', 'null': 'True', 'blank': 'True'})
|
||||
},
|
||||
u'stacktach.instanceusage': {
|
||||
'Meta': {'object_name': 'InstanceUsage'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
|
||||
},
|
||||
u'stacktach.jsonreport': {
|
||||
'Meta': {'object_name': 'JsonReport'},
|
||||
'created': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'json': ('django.db.models.fields.TextField', [], {}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
|
||||
'period_end': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
|
||||
'period_start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
|
||||
'version': ('django.db.models.fields.IntegerField', [], {'default': '1'})
|
||||
},
|
||||
u'stacktach.lifecycle': {
|
||||
'Meta': {'object_name': 'Lifecycle'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'}),
|
||||
'last_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'last_task_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
|
||||
},
|
||||
u'stacktach.rawdata': {
|
||||
'Meta': {'object_name': 'RawData'},
|
||||
'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
|
||||
'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'json': ('django.db.models.fields.TextField', [], {}),
|
||||
'old_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
|
||||
'old_task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
|
||||
'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
|
||||
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
|
||||
'task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
|
||||
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
|
||||
},
|
||||
u'stacktach.rawdataimagemeta': {
|
||||
'Meta': {'object_name': 'RawDataImageMeta'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']"}),
|
||||
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
|
||||
},
|
||||
u'stacktach.requesttracker': {
|
||||
'Meta': {'object_name': 'RequestTracker'},
|
||||
'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
|
||||
'duration': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'last_timing': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Timing']", 'null': 'True'}),
|
||||
'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
|
||||
'request_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
|
||||
'start': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
|
||||
},
|
||||
u'stacktach.timing': {
|
||||
'Meta': {'object_name': 'Timing'},
|
||||
'diff': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'end_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
|
||||
'end_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
|
||||
'start_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
|
||||
'start_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'})
|
||||
}
|
||||
}
|
||||
|
||||
complete_apps = ['stacktach']
|
||||
@@ -0,0 +1,189 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import datetime
|
||||
from south.db import db
|
||||
from south.v2 import SchemaMigration
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(SchemaMigration):
|
||||
|
||||
def forwards(self, orm):
|
||||
# Adding field 'InstanceReconcile.tenant'
|
||||
db.add_column(u'stacktach_instancereconcile', 'tenant',
|
||||
self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True),
|
||||
keep_default=False)
|
||||
|
||||
# Adding field 'InstanceReconcile.os_architecture'
|
||||
db.add_column(u'stacktach_instancereconcile', 'os_architecture',
|
||||
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
|
||||
keep_default=False)
|
||||
|
||||
# Adding field 'InstanceReconcile.os_distro'
|
||||
db.add_column(u'stacktach_instancereconcile', 'os_distro',
|
||||
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
|
||||
keep_default=False)
|
||||
|
||||
# Adding field 'InstanceReconcile.os_version'
|
||||
db.add_column(u'stacktach_instancereconcile', 'os_version',
|
||||
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
|
||||
keep_default=False)
|
||||
|
||||
# Adding field 'InstanceReconcile.rax_options'
|
||||
db.add_column(u'stacktach_instancereconcile', 'rax_options',
|
||||
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
|
||||
keep_default=False)
|
||||
|
||||
|
||||
def backwards(self, orm):
|
||||
# Deleting field 'InstanceReconcile.tenant'
|
||||
db.delete_column(u'stacktach_instancereconcile', 'tenant')
|
||||
|
||||
# Deleting field 'InstanceReconcile.os_architecture'
|
||||
db.delete_column(u'stacktach_instancereconcile', 'os_architecture')
|
||||
|
||||
# Deleting field 'InstanceReconcile.os_distro'
|
||||
db.delete_column(u'stacktach_instancereconcile', 'os_distro')
|
||||
|
||||
# Deleting field 'InstanceReconcile.os_version'
|
||||
db.delete_column(u'stacktach_instancereconcile', 'os_version')
|
||||
|
||||
# Deleting field 'InstanceReconcile.rax_options'
|
||||
db.delete_column(u'stacktach_instancereconcile', 'rax_options')
|
||||
|
||||
|
||||
models = {
|
||||
u'stacktach.deployment': {
|
||||
'Meta': {'object_name': 'Deployment'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
|
||||
},
|
||||
u'stacktach.instancedeletes': {
|
||||
'Meta': {'object_name': 'InstanceDeletes'},
|
||||
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'})
|
||||
},
|
||||
u'stacktach.instanceexists': {
|
||||
'Meta': {'object_name': 'InstanceExists'},
|
||||
'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceDeletes']"}),
|
||||
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'fail_reason': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '300', 'null': 'True', 'blank': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
|
||||
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
|
||||
'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
|
||||
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceUsage']"})
|
||||
},
|
||||
u'stacktach.instancereconcile': {
|
||||
'Meta': {'object_name': 'InstanceReconcile'},
|
||||
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'row_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
|
||||
'row_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
|
||||
'source': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '150', 'null': 'True', 'blank': 'True'}),
|
||||
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
|
||||
},
|
||||
u'stacktach.instanceusage': {
|
||||
'Meta': {'object_name': 'InstanceUsage'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
|
||||
},
|
||||
u'stacktach.jsonreport': {
|
||||
'Meta': {'object_name': 'JsonReport'},
|
||||
'created': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'json': ('django.db.models.fields.TextField', [], {}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
|
||||
'period_end': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
|
||||
'period_start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
|
||||
'version': ('django.db.models.fields.IntegerField', [], {'default': '1'})
|
||||
},
|
||||
u'stacktach.lifecycle': {
|
||||
'Meta': {'object_name': 'Lifecycle'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'}),
|
||||
'last_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'last_task_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
|
||||
},
|
||||
u'stacktach.rawdata': {
|
||||
'Meta': {'object_name': 'RawData'},
|
||||
'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
|
||||
'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'json': ('django.db.models.fields.TextField', [], {}),
|
||||
'old_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
|
||||
'old_task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
|
||||
'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
|
||||
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
|
||||
'task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
|
||||
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
|
||||
},
|
||||
u'stacktach.rawdataimagemeta': {
|
||||
'Meta': {'object_name': 'RawDataImageMeta'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']"}),
|
||||
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
|
||||
},
|
||||
u'stacktach.requesttracker': {
|
||||
'Meta': {'object_name': 'RequestTracker'},
|
||||
'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
|
||||
'duration': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'last_timing': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Timing']", 'null': 'True'}),
|
||||
'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
|
||||
'request_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
|
||||
'start': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
|
||||
},
|
||||
u'stacktach.timing': {
|
||||
'Meta': {'object_name': 'Timing'},
|
||||
'diff': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'end_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
|
||||
'end_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
|
||||
'start_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
|
||||
'start_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'})
|
||||
}
|
||||
}
|
||||
|
||||
complete_apps = ['stacktach']
|
||||
@@ -101,6 +101,14 @@ class InstanceUsage(models.Model):
    os_version = models.TextField(null=True, blank=True)
    rax_options = models.TextField(null=True, blank=True)

    def deployment(self):
        raws = RawData.objects.filter(request_id=self.request_id)
        if raws.count() == 0:
            return False
        raw = raws[0]
        return raw.deployment


class InstanceDeletes(models.Model):
    instance = models.CharField(max_length=50, null=True,
                                blank=True, db_index=True)
@@ -110,16 +118,44 @@ class InstanceDeletes(models.Model):
                                      decimal_places=6, db_index=True)
    raw = models.ForeignKey(RawData, null=True)

    def deployment(self):
        return self.raw.deployment


class InstanceReconcile(models.Model):
    row_created = models.DateTimeField(auto_now_add=True)
    row_updated = models.DateTimeField(auto_now=True)
    instance = models.CharField(max_length=50, null=True,
                                blank=True, db_index=True)
    launched_at = models.DecimalField(null=True, max_digits=20,
                                      decimal_places=6, db_index=True)
    deleted_at = models.DecimalField(null=True, max_digits=20,
                                     decimal_places=6, db_index=True)
    instance_type_id = models.CharField(max_length=50,
                                        null=True,
                                        blank=True,
                                        db_index=True)
    tenant = models.CharField(max_length=50, null=True, blank=True,
                              db_index=True)
    os_architecture = models.TextField(null=True, blank=True)
    os_distro = models.TextField(null=True, blank=True)
    os_version = models.TextField(null=True, blank=True)
    rax_options = models.TextField(null=True, blank=True)
    source = models.CharField(max_length=150, null=True,
                              blank=True, db_index=True)


class InstanceExists(models.Model):
    PENDING = 'pending'
    VERIFYING = 'verifying'
    VERIFIED = 'verified'
    RECONCILED = 'reconciled'
    FAILED = 'failed'
    STATUS_CHOICES = [
        (PENDING, 'Pending Verification'),
        (VERIFYING, 'Currently Being Verified'),
        (VERIFIED, 'Passed Verification'),
        (RECONCILED, 'Passed Verification After Reconciliation'),
        (FAILED, 'Failed Verification'),
    ]
    instance = models.CharField(max_length=50, null=True,
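The new InstanceReconcile table mirrors the usage fields. For illustration only (not part of this commit, every value below is made up), this is roughly the row that Reconciler._reconcile_instance(), added later in this diff, writes when a launch is reconciled:

from stacktach import models

models.InstanceReconcile(
    instance='aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee',  # made-up uuid
    launched_at=1361440000.0,
    deleted_at=1361443600.0,
    instance_type_id='1',
    tenant='123456',
    os_architecture='x64',        # illustrative metadata values
    os_distro='ubuntu',
    os_version='12.04',
    rax_options='0',
    source='reconciler:json_bridge:nova_db',  # 'reconciler:' + client src_str
).save()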
@@ -155,6 +191,9 @@ class InstanceExists(models.Model):
    os_version = models.TextField(null=True, blank=True)
    rax_options = models.TextField(null=True, blank=True)

    def deployment(self):
        return self.raw.deployment


class Timing(models.Model):
    """Each Timing record corresponds to a .start/.end event pair
@@ -5,7 +5,7 @@ from stacktach import image_type
class Notification(object):
    def __init__(self, body):
        self.body = body
        self.request_id = body['_context_request_id']
        self.request_id = body.get('_context_request_id', "")
        self.payload = body.get('payload', {})
        self.state = self.payload.get('state', "")
        self.old_state = self.payload.get('old_state', "")
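The switch to body.get() keeps Notification from raising KeyError on notifications that lack a request id. An illustrative body (not part of this commit):

# A notification without '_context_request_id' now yields an empty request_id
# instead of a KeyError (payload values are made up).
body = {'payload': {'state': 'active', 'old_state': 'building'}}
request_id = body.get('_context_request_id', "")
state = body.get('payload', {}).get('state', "")
assert request_id == "" and state == 'active'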
stacktach/reconciler/__init__.py (new file, 158 lines)
@@ -0,0 +1,158 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

import json

from stacktach import models
from stacktach.reconciler import exceptions
from stacktach.reconciler import nova
from stacktach import stacklog

DEFAULT_CLIENT = nova.JSONBridgeClient

CONFIG = {
    'client_class': 'JSONBridgeClient',
    'client': {
        'url': 'http://stack.dev.ramielrowe.com:8080/query/',
        'username': '',
        'password': '',
        'databases': {
            'RegionOne': 'nova',
        }
    },
    'region_mapping_loc': '/etc/stacktach/region_mapping.json'
}


class Reconciler(object):

    def __init__(self, config, client=None, region_mapping=None):
        self.config = config
        self.client = (client or Reconciler.load_client(config))
        self.region_mapping = (region_mapping or
                               Reconciler.load_region_mapping(config))

    @classmethod
    def load_client(cls, config):
        client_class = config.get('client_class')
        if client_class == 'JSONBridgeClient':
            return nova.JSONBridgeClient(config['client'])
        else:
            return DEFAULT_CLIENT(config['client'])

    @classmethod
    def load_region_mapping(cls, config):
        with open(config['region_mapping_loc']) as f:
            return json.load(f)

    def _region_for_usage(self, usage):
        deployment = usage.deployment()
        if deployment:
            deployment_name = str(deployment.name)
            if deployment_name in self.region_mapping:
                return self.region_mapping[deployment_name]
            else:
                return False
        else:
            return False

    def _reconcile_instance(self, usage, src, deleted_at=None):
        values = {
            'instance': usage.instance,
            'launched_at': usage.launched_at,
            'deleted_at': deleted_at,
            'instance_type_id': usage.instance_type_id,
            'source': 'reconciler:%s' % src,
            'tenant': usage.tenant,
            'os_architecture': usage.os_architecture,
            'os_distro': usage.os_distro,
            'os_version': usage.os_version,
            'rax_options': usage.rax_options,
        }
        models.InstanceReconcile(**values).save()

    def _fields_match(self, exists, instance):
        match_code = 0

        if (exists.launched_at != instance['launched_at'] or
                exists.instance_type_id != instance['instance_type_id'] or
                exists.tenant != instance['tenant'] or
                exists.os_architecture != instance['os_architecture'] or
                exists.os_distro != instance['os_distro'] or
                exists.os_version != instance['os_version'] or
                exists.rax_options != instance['rax_options']):
            match_code = 1

        if exists.deleted_at is not None:
            # Exists says deleted
            if (instance['deleted'] and
                    exists.deleted_at != instance['deleted_at']):
                # Nova says deleted, but times don't match
                match_code = 2
            elif not instance['deleted']:
                # Nova says not deleted
                match_code = 3
        elif exists.deleted_at is None and instance['deleted']:
            # Exists says not deleted, but Nova says deleted
            match_code = 4

        return match_code

    def missing_exists_for_instance(self, launched_id,
                                    period_beginning):
        reconciled = False
        launch = models.InstanceUsage.objects.get(id=launched_id)
        region = self._region_for_usage(launch)
        try:
            instance = self.client.get_instance(region, launch.instance)
            if instance['deleted'] and instance['deleted_at'] is not None:
                # Check to see if instance has been deleted
                deleted_at = instance['deleted_at']

                if deleted_at < period_beginning:
                    # Check to see if instance was deleted before period.
                    # If so, we shouldn't expect an exists.
                    self._reconcile_instance(launch, self.client.src_str,
                                             deleted_at=instance['deleted_at'])
                    reconciled = True
        except exceptions.NotFound:
            stacklog.info("Couldn't find instance for launch %s" % launched_id)

        return reconciled

    def failed_validation(self, exists):
        reconciled = False
        region = self._region_for_usage(exists)
        try:
            instance = self.client.get_instance(region, exists.instance,
                                                get_metadata=True)
            match_code = self._fields_match(exists, instance)
            if match_code == 0:
                self._reconcile_instance(exists, self.client.src_str,
                                         deleted_at=exists.deleted_at)
                reconciled = True
            else:
                msg = "Exists %s failed reconciliation with code %s"
                msg %= (exists.id, match_code)
                stacklog.info(msg)
        except exceptions.NotFound:
            stacklog.info("Couldn't find instance for exists %s" % exists.id)

        return reconciled
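For reference, the two public methods above are the entry points the audit script uses. A minimal wrapper sketch (illustrative only, argument names are not from the commit):

from stacktach.reconciler import Reconciler

def try_reconcile(config, launch_id, period_beginning, exists_row):
    """Illustrative wrapper over the two entry points the audit calls."""
    rec = Reconciler(config)
    # A launch with no matching exists is reconciled only if nova shows the
    # instance deleted before the audit period began.
    launch_reconciled = rec.missing_exists_for_instance(launch_id,
                                                        period_beginning)
    # An exists row that failed verification is reconciled only when
    # _fields_match() returns 0, i.e. every field agrees with nova.
    exists_reconciled = rec.failed_validation(exists_row)
    return launch_reconciled, exists_reconciled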
stacktach/reconciler/exceptions.py (new file, 3 lines)
@@ -0,0 +1,3 @@
class NotFound(Exception):
    def __init__(self, message="NotFound"):
        self.message = message
stacktach/reconciler/nova.py (new file, 90 lines)
@@ -0,0 +1,90 @@
import requests

from stacktach import utils as stackutils
from stacktach.reconciler import exceptions
from stacktach.reconciler.utils import empty_reconciler_instance

GET_INSTANCE_QUERY = "SELECT * FROM instances where uuid ='%s';"

METADATA_MAPPING = {
    'image_org.openstack__1__architecture': 'os_architecture',
    'image_org.openstack__1__os_distro': 'os_distro',
    'image_org.openstack__1__os_version': 'os_version',
    'image_com.rackspace__1__options': 'rax_options',
}
METADATA_FIELDS = ["'%s'" % x for x in METADATA_MAPPING.keys()]
METADATA_FIELDS = ','.join(METADATA_FIELDS)

GET_INSTANCE_SYSTEM_METADATA = """
SELECT * FROM instance_system_metadata
WHERE instance_uuid = '%s' AND
      deleted = 0 AND `key` IN (%s);
"""
GET_INSTANCE_SYSTEM_METADATA %= ('%s', METADATA_FIELDS)


def _json(result):
    if callable(result.json):
        return result.json()
    else:
        return result.json


class JSONBridgeClient(object):
    src_str = 'json_bridge:nova_db'

    def __init__(self, config):
        self.config = config

    def _url_for_region(self, region):
        return self.config['url'] + self.config['databases'][region]

    def _do_query(self, region, query):
        data = {'sql': query}
        credentials = (self.config['username'], self.config['password'])
        return _json(requests.post(self._url_for_region(region), data,
                                   verify=False, auth=credentials))

    def _to_reconciler_instance(self, instance, metadata=None):
        r_instance = empty_reconciler_instance()
        r_instance.update({
            'id': instance['uuid'],
            'tenant': instance['project_id'],
            'instance_type_id': str(instance['instance_type_id']),
        })

        if instance['launched_at'] is not None:
            launched_at = stackutils.str_time_to_unix(instance['launched_at'])
            r_instance['launched_at'] = launched_at

        if instance['terminated_at'] is not None:
            deleted_at = stackutils.str_time_to_unix(instance['terminated_at'])
            r_instance['deleted_at'] = deleted_at

        if instance['deleted'] != 0:
            r_instance['deleted'] = True

        if metadata is not None:
            r_instance.update(metadata)

        return r_instance

    def _get_instance_meta(self, region, uuid):
        results = self._do_query(region, GET_INSTANCE_SYSTEM_METADATA % uuid)
        metadata = {}
        for result in results['result']:
            key = result['key']
            if key in METADATA_MAPPING:
                metadata[METADATA_MAPPING[key]] = result['value']
        return metadata

    def get_instance(self, region, uuid, get_metadata=False):
        results = self._do_query(region, GET_INSTANCE_QUERY % uuid)['result']
        if len(results) > 0:
            metadata = None
            if get_metadata:
                metadata = self._get_instance_meta(region, uuid)
            return self._to_reconciler_instance(results[0], metadata=metadata)
        else:
            msg = "Couldn't find instance (%s) using JSON Bridge in region (%s)"
            raise exceptions.NotFound(msg % (uuid, region))
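JSONBridgeClient is raw SQL over HTTP. A minimal sketch of what _do_query() sends, using the keys from the sample config (assumption: the json-bridge endpoint accepts a form-encoded 'sql' field, as the code above implies; the helper below is illustrative, not part of the commit):

import requests

def run_bridge_query(client_config, region, sql):
    # Mirrors JSONBridgeClient._do_query(): POST the SQL to <url><database>
    # with basic auth; handle requests exposing .json as property or method.
    url = client_config['url'] + client_config['databases'][region]
    auth = (client_config['username'], client_config['password'])
    response = requests.post(url, {'sql': sql}, verify=False, auth=auth)
    return response.json() if callable(response.json) else response.json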
stacktach/reconciler/utils.py (new file, 14 lines)
@@ -0,0 +1,14 @@
def empty_reconciler_instance():
    r_instance = {
        'id': None,
        'tenant': None,
        'launched_at': None,
        'deleted': False,
        'deleted_at': None,
        'instance_type_id': None,
        'os_architecture': '',
        'os_distro': '',
        'os_version': '',
        'rax_options': '',
    }
    return r_instance
@@ -74,3 +74,9 @@ def error(msg, name=None):
    if name is None:
        name = default_logger_name
    get_logger(name=name).error(msg)


def info(msg, name=None):
    if name is None:
        name = default_logger_name
    get_logger(name=name).info(msg)
@@ -20,7 +20,7 @@ urlpatterns = patterns('',
    url(r'stacky/watch/(?P<deployment_id>\d+)/$',
        'stacktach.stacky_server.do_watch'),
    url(r'stacky/kpi/$', 'stacktach.stacky_server.do_kpi'),
    url(r'stacky/kpi/(?P<tenant_id>\d+)/$', 'stacktach.stacky_server.do_kpi'),
    url(r'stacky/kpi/(?P<tenant_id>\w+)/$', 'stacktach.stacky_server.do_kpi'),
    url(r'stacky/usage/launches/$',
        'stacktach.stacky_server.do_list_usage_launches'),
    url(r'stacky/usage/deletes/$',
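The kpi route change swaps \d+ for \w+ so tenant ids that contain letters resolve. A quick check with example paths (not part of the commit):

import re

old_pattern = re.compile(r'stacky/kpi/(?P<tenant_id>\d+)/$')
new_pattern = re.compile(r'stacky/kpi/(?P<tenant_id>\w+)/$')

# Alphanumeric tenant ids only match the new pattern.
assert old_pattern.search('stacky/kpi/abc123/') is None
assert new_pattern.search('stacky/kpi/abc123/') is not None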
tests/unit/test_reconciler.py (new file, 515 lines)
@@ -0,0 +1,515 @@
|
||||
# Copyright (c) 2013 - Rackspace Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to
|
||||
# deal in the Software without restriction, including without limitation the
|
||||
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
# sell copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import datetime
|
||||
import unittest
|
||||
|
||||
import mox
|
||||
import requests
|
||||
|
||||
from stacktach import models
|
||||
from stacktach import reconciler
|
||||
from stacktach import utils as stackutils
|
||||
from stacktach.reconciler import exceptions
|
||||
from stacktach.reconciler import nova
|
||||
from stacktach.reconciler import utils as rec_utils
|
||||
from tests.unit import utils
|
||||
from tests.unit.utils import INSTANCE_ID_1
|
||||
from tests.unit.utils import TENANT_ID_1
|
||||
|
||||
region_mapping = {
|
||||
'RegionOne.prod.cell1': 'RegionOne',
|
||||
'RegionTwo.prod.cell1': 'RegionTwo',
|
||||
}
|
||||
|
||||
DEFAULT_OS_ARCH = 'os_arch'
|
||||
DEFAULT_OS_DISTRO = 'os_dist'
|
||||
DEFAULT_OS_VERSION = "1.1"
|
||||
DEFAULT_RAX_OPTIONS = "rax_ops"
|
||||
|
||||
|
||||
class ReconcilerTestCase(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
self.client = self.mox.CreateMockAnything()
|
||||
self.client.src_str = 'mocked_client'
|
||||
self.reconciler = reconciler.Reconciler({},
|
||||
client=self.client,
|
||||
region_mapping=region_mapping)
|
||||
self.mox.StubOutWithMock(models, 'RawData', use_mock_anything=True)
|
||||
models.RawData.objects = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(models, 'Deployment', use_mock_anything=True)
|
||||
models.Deployment.objects = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(models, 'Lifecycle', use_mock_anything=True)
|
||||
models.Lifecycle.objects = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(models, 'Timing', use_mock_anything=True)
|
||||
models.Timing.objects = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(models, 'RequestTracker',
|
||||
use_mock_anything=True)
|
||||
models.RequestTracker.objects = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(models, 'InstanceUsage',
|
||||
use_mock_anything=True)
|
||||
models.InstanceUsage.objects = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(models, 'InstanceReconcile',
|
||||
use_mock_anything=True)
|
||||
models.InstanceReconcile.objects = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(models, 'InstanceDeletes',
|
||||
use_mock_anything=True)
|
||||
models.InstanceDeletes.objects = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(models, 'InstanceExists',
|
||||
use_mock_anything=True)
|
||||
models.InstanceExists.objects = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(models, 'JsonReport', use_mock_anything=True)
|
||||
models.JsonReport.objects = self.mox.CreateMockAnything()
|
||||
|
||||
def tearDown(self):
|
||||
self.mox.UnsetStubs()
|
||||
|
||||
def _fake_usage(self, is_exists=False, is_deleted=False,
|
||||
mock_deployment=False):
|
||||
usage = self.mox.CreateMockAnything()
|
||||
usage.id = 1
|
||||
beginning_d = utils.decimal_utc()
|
||||
usage.instance = INSTANCE_ID_1
|
||||
launched_at = beginning_d - (60*60)
|
||||
usage.launched_at = launched_at
|
||||
usage.instance_type_id = 1
|
||||
usage.tenant = TENANT_ID_1
|
||||
usage.os_architecture = DEFAULT_OS_ARCH
|
||||
usage.os_distro = DEFAULT_OS_DISTRO
|
||||
usage.os_version = DEFAULT_OS_VERSION
|
||||
usage.rax_options = DEFAULT_RAX_OPTIONS
|
||||
if is_exists:
|
||||
usage.deleted_at = None
|
||||
if is_deleted:
|
||||
usage.deleted_at = beginning_d
|
||||
if mock_deployment:
|
||||
deployment = self.mox.CreateMockAnything()
|
||||
deployment.name = 'RegionOne.prod.cell1'
|
||||
usage.deployment().AndReturn(deployment)
|
||||
return usage
|
||||
|
||||
def _fake_reconciler_instance(self, uuid=INSTANCE_ID_1, launched_at=None,
|
||||
deleted_at=None, deleted=False,
|
||||
instance_type_id=1, tenant=TENANT_ID_1,
|
||||
os_arch=DEFAULT_OS_ARCH,
|
||||
os_distro=DEFAULT_OS_DISTRO,
|
||||
os_verison=DEFAULT_OS_VERSION,
|
||||
rax_options=DEFAULT_RAX_OPTIONS):
|
||||
instance = rec_utils.empty_reconciler_instance()
|
||||
instance.update({
|
||||
'id': uuid,
|
||||
'launched_at': launched_at,
|
||||
'deleted_at': deleted_at,
|
||||
'deleted': deleted,
|
||||
'instance_type_id': instance_type_id,
|
||||
'tenant': tenant,
|
||||
'os_architecture': os_arch,
|
||||
'os_distro': os_distro,
|
||||
'os_version': os_version,
|
||||
'rax_options': rax_options,
|
||||
})
|
||||
return instance
|
||||
|
||||
def test_load_client_json_bridge(self):
|
||||
mock_config = self.mox.CreateMockAnything()
|
||||
config = {'client_class': 'JSONBridgeClient', 'client': mock_config}
|
||||
nova.JSONBridgeClient(mock_config)
|
||||
self.mox.ReplayAll()
|
||||
reconciler.Reconciler.load_client(config)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_load_client_no_class_loads_default_class(self):
|
||||
mock_config = self.mox.CreateMockAnything()
|
||||
config = {'client': mock_config}
|
||||
nova.JSONBridgeClient(mock_config)
|
||||
self.mox.ReplayAll()
|
||||
reconciler.Reconciler.load_client(config)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_load_client_incorrect_class_loads_default_class(self):
|
||||
mock_config = self.mox.CreateMockAnything()
|
||||
config = {'client_class': 'BadConfigValue', 'client': mock_config}
|
||||
nova.JSONBridgeClient(mock_config)
|
||||
self.mox.ReplayAll()
|
||||
reconciler.Reconciler.load_client(config)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_region_for_launch(self):
|
||||
launch = self.mox.CreateMockAnything()
|
||||
deployment = self.mox.CreateMockAnything()
|
||||
deployment.name = 'RegionOne.prod.cell1'
|
||||
launch.deployment().AndReturn(deployment)
|
||||
self.mox.ReplayAll()
|
||||
region = self.reconciler._region_for_usage(launch)
|
||||
self.assertEqual('RegionOne', region)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_region_for_launch_no_mapping(self):
|
||||
launch = self.mox.CreateMockAnything()
|
||||
deployment = self.mox.CreateMockAnything()
|
||||
deployment.name = 'RegionOne.prod.cell2'
|
||||
launch.deployment().AndReturn(deployment)
|
||||
self.mox.ReplayAll()
|
||||
region = self.reconciler._region_for_usage(launch)
|
||||
self.assertFalse(region)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_region_for_launch_no_raws(self):
|
||||
launch = self.mox.CreateMockAnything()
|
||||
launch.deployment()
|
||||
self.mox.ReplayAll()
|
||||
region = self.reconciler._region_for_usage(launch)
|
||||
self.assertFalse(region)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_missing_exists_for_instance(self):
|
||||
launch = self._fake_usage(mock_deployment=True)
|
||||
launched_at = launch.launched_at
|
||||
deleted_at = launched_at + (60*30)
|
||||
period_beginning = deleted_at + 1
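# The instance was deleted before the audit period began, so the missing
# exists should be reconcilable and an InstanceReconcile row saved below.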
|
||||
models.InstanceUsage.objects.get(id=launch.id).AndReturn(launch)
|
||||
rec_inst = self._fake_reconciler_instance(deleted=True,
|
||||
deleted_at=deleted_at)
|
||||
self.client.get_instance('RegionOne', INSTANCE_ID_1).AndReturn(rec_inst)
|
||||
reconcile_vals = {
|
||||
'instance': launch.instance,
|
||||
'launched_at': launch.launched_at,
|
||||
'deleted_at': deleted_at,
|
||||
'instance_type_id': launch.instance_type_id,
|
||||
'source': 'reconciler:mocked_client',
|
||||
'tenant': TENANT_ID_1,
|
||||
'os_architecture': DEFAULT_OS_ARCH,
|
||||
'os_distro': DEFAULT_OS_DISTRO,
|
||||
'os_version': DEFAULT_OS_VERSION,
|
||||
'rax_options': DEFAULT_RAX_OPTIONS,
|
||||
}
|
||||
result = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile(**reconcile_vals).AndReturn(result)
|
||||
result.save()
|
||||
self.mox.ReplayAll()
|
||||
result = self.reconciler.missing_exists_for_instance(launch.id,
|
||||
period_beginning)
|
||||
self.assertTrue(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_missing_exists_for_instance_not_found(self):
|
||||
launch_id = 1
|
||||
beginning_d = utils.decimal_utc()
|
||||
launch = self.mox.CreateMockAnything()
|
||||
launch.instance = INSTANCE_ID_1
|
||||
launch.launched_at = beginning_d - (60*60)
|
||||
launch.instance_type_id = 1
|
||||
models.InstanceUsage.objects.get(id=launch_id).AndReturn(launch)
|
||||
deployment = self.mox.CreateMockAnything()
|
||||
launch.deployment().AndReturn(deployment)
|
||||
deployment.name = 'RegionOne.prod.cell1'
|
||||
ex = exceptions.NotFound()
|
||||
self.client.get_instance('RegionOne', INSTANCE_ID_1).AndRaise(ex)
|
||||
self.mox.ReplayAll()
|
||||
result = self.reconciler.missing_exists_for_instance(launch_id,
|
||||
beginning_d)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_failed_validation(self):
|
||||
exists = self._fake_usage(is_exists=True, mock_deployment=True)
|
||||
launched_at = exists.launched_at
|
||||
rec_inst = self._fake_reconciler_instance(launched_at=launched_at)
|
||||
self.client.get_instance('RegionOne', INSTANCE_ID_1,
|
||||
get_metadata=True).AndReturn(rec_inst)
|
||||
reconcile_vals = {
|
||||
'instance': exists.instance,
|
||||
'launched_at': exists.launched_at,
|
||||
'deleted_at': exists.deleted_at,
|
||||
'instance_type_id': exists.instance_type_id,
|
||||
'source': 'reconciler:mocked_client',
|
||||
'tenant': TENANT_ID_1,
|
||||
'os_architecture': DEFAULT_OS_ARCH,
|
||||
'os_distro': DEFAULT_OS_DISTRO,
|
||||
'os_version': DEFAULT_OS_VERSION,
|
||||
'rax_options': DEFAULT_RAX_OPTIONS,
|
||||
}
|
||||
result = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile(**reconcile_vals).AndReturn(result)
|
||||
result.save()
|
||||
self.mox.ReplayAll()
|
||||
result = self.reconciler.failed_validation(exists)
|
||||
self.assertTrue(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_failed_validation_deleted(self):
|
||||
exists = self._fake_usage(is_exists=True, is_deleted=True,
|
||||
mock_deployment=True)
|
||||
launched_at = exists.launched_at
|
||||
deleted_at = exists.deleted_at
|
||||
rec_inst = self._fake_reconciler_instance(launched_at=launched_at,
|
||||
deleted=True,
|
||||
deleted_at=deleted_at)
|
||||
self.client.get_instance('RegionOne', INSTANCE_ID_1,
|
||||
get_metadata=True).AndReturn(rec_inst)
|
||||
reconcile_vals = {
|
||||
'instance': exists.instance,
|
||||
'launched_at': exists.launched_at,
|
||||
'deleted_at': exists.deleted_at,
|
||||
'instance_type_id': exists.instance_type_id,
|
||||
'source': 'reconciler:mocked_client',
|
||||
'tenant': TENANT_ID_1,
|
||||
'os_architecture': DEFAULT_OS_ARCH,
|
||||
'os_distro': DEFAULT_OS_DISTRO,
|
||||
'os_version': DEFAULT_OS_VERSION,
|
||||
'rax_options': DEFAULT_RAX_OPTIONS,
|
||||
}
|
||||
result = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile(**reconcile_vals).AndReturn(result)
|
||||
result.save()
|
||||
self.mox.ReplayAll()
|
||||
result = self.reconciler.failed_validation(exists)
|
||||
self.assertTrue(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_failed_validation_deleted_not_matching(self):
|
||||
beginning_d = utils.decimal_utc()
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
launched_at = beginning_d - (60*60)
|
||||
exists.launched_at = launched_at
|
||||
exists.instance_type_id = 1
|
||||
exists.deleted_at = beginning_d
|
||||
deployment = self.mox.CreateMockAnything()
|
||||
exists.deployment().AndReturn(deployment)
|
||||
deployment.name = 'RegionOne.prod.cell1'
|
||||
rec_inst = self._fake_reconciler_instance(launched_at=launched_at,
|
||||
deleted=True,
|
||||
deleted_at=beginning_d+1)
|
||||
self.client.get_instance('RegionOne', INSTANCE_ID_1,
|
||||
get_metadata=True).AndReturn(rec_inst)
|
||||
self.mox.ReplayAll()
|
||||
result = self.reconciler.failed_validation(exists)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_failed_validation_deleted_not_deleted_from_client(self):
|
||||
beginning_d = utils.decimal_utc()
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
launched_at = beginning_d - (60*60)
|
||||
exists.launched_at = launched_at
|
||||
exists.instance_type_id = 1
|
||||
exists.deleted_at = beginning_d
|
||||
deployment = self.mox.CreateMockAnything()
|
||||
exists.deployment().AndReturn(deployment)
|
||||
deployment.name = 'RegionOne.prod.cell1'
|
||||
rec_inst = self._fake_reconciler_instance(launched_at=launched_at)
|
||||
self.client.get_instance('RegionOne', INSTANCE_ID_1,
|
||||
get_metadata=True).AndReturn(rec_inst)
|
||||
self.mox.ReplayAll()
|
||||
result = self.reconciler.failed_validation(exists)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_failed_validation_not_found(self):
|
||||
beginning_d = utils.decimal_utc()
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
launched_at = beginning_d - (60*60)
|
||||
exists.launched_at = launched_at
|
||||
exists.instance_type_id = 1
|
||||
exists.deleted_at = None
|
||||
deployment = self.mox.CreateMockAnything()
|
||||
exists.deployment().AndReturn(deployment)
|
||||
deployment.name = 'RegionOne.prod.cell1'
|
||||
ex = exceptions.NotFound()
|
||||
self.client.get_instance('RegionOne', INSTANCE_ID_1,
|
||||
get_metadata=True).AndRaise(ex)
|
||||
self.mox.ReplayAll()
|
||||
result = self.reconciler.failed_validation(exists)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_fields_match(self):
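# Match codes asserted by the tests below appear to be: 0 = match,
# 1 = launched_at mismatch, 2 = deleted_at mismatch, 3 = deleted in the
# exists but not in nova, 4 = deleted in nova but not in the exists
# (inferred from the assertions, not stated in the diff itself).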
|
||||
exists = self._fake_usage(is_exists=True)
|
||||
kwargs = {'launched_at': exists.launched_at}
|
||||
instance = self._fake_reconciler_instance(**kwargs)
|
||||
self.mox.ReplayAll()
|
||||
match_code = self.reconciler._fields_match(exists, instance)
|
||||
self.assertEqual(match_code, 0)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_fields_match_field_with_deleted(self):
|
||||
exists = self._fake_usage(is_exists=True, is_deleted=True)
|
||||
kwargs = {'launched_at': exists.launched_at,
|
||||
'deleted': True,
|
||||
'deleted_at': exists.deleted_at}
|
||||
instance = self._fake_reconciler_instance(**kwargs)
|
||||
self.mox.ReplayAll()
|
||||
match_code = self.reconciler._fields_match(exists, instance)
|
||||
self.assertEqual(match_code, 0)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_fields_match_field_miss_match(self):
|
||||
exists = self._fake_usage(is_exists=True)
|
||||
kwargs = {'launched_at': exists.launched_at + 1}
|
||||
instance = self._fake_reconciler_instance(**kwargs)
|
||||
self.mox.ReplayAll()
|
||||
match_code = self.reconciler._fields_match(exists, instance)
|
||||
self.assertEqual(match_code, 1)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_fields_match_field_with_deleted_miss_match(self):
|
||||
exists = self._fake_usage(is_exists=True, is_deleted=True)
|
||||
kwargs = {'launched_at': exists.launched_at,
|
||||
'deleted': True,
|
||||
'deleted_at': exists.deleted_at+1}
|
||||
instance = self._fake_reconciler_instance(**kwargs)
|
||||
self.mox.ReplayAll()
|
||||
match_code = self.reconciler._fields_match(exists, instance)
|
||||
self.assertEqual(match_code, 2)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_fields_match_field_not_deleted_in_nova(self):
|
||||
exists = self._fake_usage(is_exists=True, is_deleted=True)
|
||||
kwargs = {'launched_at': exists.launched_at}
|
||||
instance = self._fake_reconciler_instance(**kwargs)
|
||||
self.mox.ReplayAll()
|
||||
match_code = self.reconciler._fields_match(exists, instance)
|
||||
self.assertEqual(match_code, 3)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_fields_match_field_not_deleted_in_exists(self):
|
||||
exists = self._fake_usage(is_exists=True)
|
||||
kwargs = {'launched_at': exists.launched_at,
|
||||
'deleted': True,
|
||||
'deleted_at': exists.launched_at + 1}
|
||||
instance = self._fake_reconciler_instance(**kwargs)
|
||||
self.mox.ReplayAll()
|
||||
match_code = self.reconciler._fields_match(exists, instance)
|
||||
self.assertEqual(match_code, 4)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
|
||||
json_bridge_config = {
|
||||
'url': 'http://json_bridge.example.com/query/',
|
||||
'username': 'user',
|
||||
'password': 'pass',
|
||||
'databases': {
|
||||
'RegionOne': 'nova',
|
||||
}
|
||||
}
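# Rough sketch (not part of the original change) of the call shape the
# client is expected to make, inferred from mock_for_query() below:
#   requests.post(url + database, {'sql': query},
#                 auth=(username, password), verify=False)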
|
||||
|
||||
|
||||
class NovaJSONBridgeClientTestCase(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
self.client = nova.JSONBridgeClient(json_bridge_config)
|
||||
self.mox.StubOutWithMock(requests, 'post')
|
||||
|
||||
def tearDown(self):
|
||||
self.mox.UnsetStubs()
|
||||
|
||||
def mock_for_query(self, database, query, results):
|
||||
url = json_bridge_config['url'] + database
|
||||
data = {'sql': query}
|
||||
auth = (json_bridge_config['username'], json_bridge_config['password'])
|
||||
result = {'result': results}
|
||||
response = self.mox.CreateMockAnything()
|
||||
requests.post(url, data, auth=auth, verify=False)\
|
||||
.AndReturn(response)
|
||||
response.json().AndReturn(result)
|
||||
|
||||
def _fake_instance(self, uuid=INSTANCE_ID_1, launched_at=None,
|
||||
terminated_at=None, deleted=0, instance_type_id=1,
|
||||
project_id=TENANT_ID_1):
|
||||
return {
|
||||
'uuid': uuid,
|
||||
'launched_at': launched_at,
|
||||
'terminated_at': terminated_at,
|
||||
'deleted': deleted,
|
||||
'instance_type_id': instance_type_id,
|
||||
'project_id': project_id
|
||||
}
|
||||
|
||||
def test_get_instance(self):
|
||||
launched_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=5)
|
||||
launched_at = str(launched_at)
|
||||
terminated_at = str(datetime.datetime.utcnow())
|
||||
results = [self._fake_instance(launched_at=launched_at,
|
||||
terminated_at=terminated_at,
|
||||
deleted=True)]
|
||||
self.mock_for_query('nova', nova.GET_INSTANCE_QUERY % INSTANCE_ID_1,
|
||||
results)
|
||||
self.mox.ReplayAll()
|
||||
instance = self.client.get_instance('RegionOne', INSTANCE_ID_1)
|
||||
self.assertIsNotNone(instance)
|
||||
self.assertEqual(instance['id'], INSTANCE_ID_1)
|
||||
self.assertEqual(instance['instance_type_id'], '1')
|
||||
launched_at_dec = stackutils.str_time_to_unix(launched_at)
|
||||
self.assertEqual(instance['launched_at'], launched_at_dec)
|
||||
terminated_at_dec = stackutils.str_time_to_unix(terminated_at)
|
||||
self.assertEqual(instance['deleted_at'], terminated_at_dec)
|
||||
self.assertTrue(instance['deleted'])
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def _fake_metadata(self):
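# Image system metadata rows that should map onto os_architecture,
# os_distro, os_version and rax_options on the reconciled instance.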
|
||||
metadata = [
|
||||
{'key': 'image_org.openstack__1__architecture',
|
||||
'value': DEFAULT_OS_ARCH},
|
||||
{'key': 'image_org.openstack__1__os_distro',
|
||||
'value': DEFAULT_OS_DISTRO},
|
||||
{'key': 'image_org.openstack__1__os_version',
|
||||
'value': DEFAULT_OS_VERSION},
|
||||
{'key': 'image_com.rackspace__1__options',
|
||||
'value': DEFAULT_RAX_OPTIONS},
|
||||
]
|
||||
return metadata
|
||||
|
||||
def test_get_instance_with_metadata(self):
|
||||
launched_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=5)
|
||||
launched_at = str(launched_at)
|
||||
terminated_at = str(datetime.datetime.utcnow())
|
||||
results = [self._fake_instance(launched_at=launched_at,
|
||||
terminated_at=terminated_at,
|
||||
deleted=True)]
|
||||
metadata_results = self._fake_metadata()
|
||||
self.mock_for_query('nova', nova.GET_INSTANCE_QUERY % INSTANCE_ID_1,
|
||||
results)
|
||||
self.mock_for_query('nova',
|
||||
nova.GET_INSTANCE_SYSTEM_METADATA % INSTANCE_ID_1,
|
||||
metadata_results)
|
||||
self.mox.ReplayAll()
|
||||
instance = self.client.get_instance('RegionOne', INSTANCE_ID_1,
|
||||
get_metadata=True)
|
||||
self.assertIsNotNone(instance)
|
||||
self.assertEqual(instance['id'], INSTANCE_ID_1)
|
||||
self.assertEqual(instance['instance_type_id'], '1')
|
||||
launched_at_dec = stackutils.str_time_to_unix(launched_at)
|
||||
self.assertEqual(instance['launched_at'], launched_at_dec)
|
||||
terminated_at_dec = stackutils.str_time_to_unix(terminated_at)
|
||||
self.assertEqual(instance['deleted_at'], terminated_at_dec)
|
||||
self.assertTrue(instance['deleted'])
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_get_instance_not_found(self):
|
||||
self.mock_for_query('nova', nova.GET_INSTANCE_QUERY % INSTANCE_ID_1,
|
||||
[])
|
||||
self.mox.ReplayAll()
|
||||
self.assertRaises(exceptions.NotFound, self.client.get_instance,
|
||||
'RegionOne', INSTANCE_ID_1)
|
||||
self.mox.VerifyAll()
|
||||
@@ -21,10 +21,12 @@
import datetime
import decimal
import json
import time
import unittest
import uuid
import multiprocessing
|
||||
|
||||
from django.db import transaction
|
||||
import kombu.common
|
||||
import kombu.entity
|
||||
import kombu.pools
|
||||
@@ -71,14 +73,58 @@ class VerifierTestCase(unittest.TestCase):
|
||||
self.mox.StubOutWithMock(models, 'InstanceDeletes',
|
||||
use_mock_anything=True)
|
||||
models.InstanceDeletes.objects = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(models, 'InstanceReconcile',
|
||||
use_mock_anything=True)
|
||||
models.InstanceReconcile.objects = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(models, 'InstanceExists',
|
||||
use_mock_anything=True)
|
||||
models.InstanceExists.objects = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(models, 'JsonReport', use_mock_anything=True)
|
||||
models.JsonReport.objects = self.mox.CreateMockAnything()
|
||||
self._setup_verifier()
|
||||
|
||||
def _setup_verifier(self):
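# Builds two verifiers with injected pool/reconciler mocks: one with
# notifications disabled and one with a rabbit notification config.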
|
||||
self.config = {
|
||||
"tick_time": 30,
|
||||
"settle_time": 5,
|
||||
"settle_units": "minutes",
|
||||
"pool_size": 2,
|
||||
"enable_notifications": False,
|
||||
}
|
||||
self.pool = self.mox.CreateMockAnything()
|
||||
self.reconciler = self.mox.CreateMockAnything()
|
||||
self.verifier = dbverifier.Verifier(self.config,
|
||||
pool=self.pool,
|
||||
rec=self.reconciler)
|
||||
|
||||
self.config_notif = {
|
||||
"tick_time": 30,
|
||||
"settle_time": 5,
|
||||
"settle_units": "minutes",
|
||||
"pool_size": 2,
|
||||
"enable_notifications": True,
|
||||
"rabbit": {
|
||||
"durable_queue": False,
|
||||
"host": "10.0.0.1",
|
||||
"port": 5672,
|
||||
"userid": "rabbit",
|
||||
"password": "rabbit",
|
||||
"virtual_host": "/",
|
||||
"exchange_name": "stacktach",
|
||||
}
|
||||
}
|
||||
self.pool_notif = self.mox.CreateMockAnything()
|
||||
self.reconciler_notif = self.mox.CreateMockAnything()
|
||||
self.verifier_notif = dbverifier.Verifier(self.config_notif,
|
||||
pool=self.pool_notif,
|
||||
rec=self.reconciler)
|
||||
|
||||
def tearDown(self):
|
||||
self.mox.UnsetStubs()
|
||||
self.verifier = None
|
||||
self.pool = None
|
||||
self.verifier_notif = None
|
||||
self.pool_notif = None
|
||||
|
||||
def test_verify_for_launch(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
@@ -437,6 +483,157 @@ class VerifierTestCase(unittest.TestCase):
|
||||
self.assertEqual(fm.actual, decimal.Decimal('6.1'))
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_with_reconciled_data(self):
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
exists.launched_at = launched_at
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
|
||||
.AndReturn(results)
|
||||
results.count().AndReturn(1)
|
||||
launched_min = decimal.Decimal('1')
|
||||
launched_max = decimal.Decimal('1.999999')
|
||||
filter = {
|
||||
'instance': INSTANCE_ID_1,
|
||||
'launched_at__gte': launched_min,
|
||||
'launched_at__lte': launched_max
|
||||
}
|
||||
recs = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.objects.filter(**filter).AndReturn(recs)
|
||||
recs.count().AndReturn(1)
|
||||
reconcile = self.mox.CreateMockAnything()
|
||||
reconcile.deleted_at = None
|
||||
recs[0].AndReturn(reconcile)
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_for_launch')
|
||||
dbverifier._verify_for_launch(exists, launch=reconcile,
|
||||
launch_type='InstanceReconcile')
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_for_delete')
|
||||
dbverifier._verify_for_delete(exists, delete=None,
|
||||
delete_type='InstanceReconcile')
|
||||
self.mox.ReplayAll()
|
||||
dbverifier._verify_with_reconciled_data(exists)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_with_reconciled_data_deleted(self):
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
deleted_at = decimal.Decimal('2.1')
|
||||
exists.launched_at = launched_at
|
||||
exists.deleted_at = deleted_at
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
|
||||
.AndReturn(results)
|
||||
results.count().AndReturn(1)
|
||||
launched_min = decimal.Decimal('1')
|
||||
launched_max = decimal.Decimal('1.999999')
|
||||
filter = {
|
||||
'instance': INSTANCE_ID_1,
|
||||
'launched_at__gte': launched_min,
|
||||
'launched_at__lte': launched_max
|
||||
}
|
||||
recs = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.objects.filter(**filter).AndReturn(recs)
|
||||
recs.count().AndReturn(1)
|
||||
reconcile = self.mox.CreateMockAnything()
|
||||
reconcile.deleted_at = deleted_at
|
||||
recs[0].AndReturn(reconcile)
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_for_launch')
|
||||
dbverifier._verify_for_launch(exists, launch=reconcile,
|
||||
launch_type='InstanceReconcile')
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_for_delete')
|
||||
dbverifier._verify_for_delete(exists, delete=reconcile,
|
||||
delete_type='InstanceReconcile')
|
||||
self.mox.ReplayAll()
|
||||
dbverifier._verify_with_reconciled_data(exists)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_with_reconciled_data_not_launched(self):
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
exists.launched_at = None
|
||||
self.mox.ReplayAll()
|
||||
with self.assertRaises(VerificationException) as cm:
|
||||
dbverifier._verify_with_reconciled_data(exists)
|
||||
exception = cm.exception
|
||||
self.assertEquals(exception.reason, 'Exists without a launched_at')
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_with_reconciled_data_ambiguous_results(self):
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
deleted_at = decimal.Decimal('2.1')
|
||||
exists.launched_at = launched_at
|
||||
exists.deleted_at = deleted_at
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
|
||||
.AndReturn(results)
|
||||
results.count().AndReturn(1)
|
||||
launched_min = decimal.Decimal('1')
|
||||
launched_max = decimal.Decimal('1.999999')
|
||||
filter = {
|
||||
'instance': INSTANCE_ID_1,
|
||||
'launched_at__gte': launched_min,
|
||||
'launched_at__lte': launched_max
|
||||
}
|
||||
recs = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.objects.filter(**filter).AndReturn(recs)
|
||||
recs.count().AndReturn(2)
|
||||
self.mox.ReplayAll()
|
||||
with self.assertRaises(AmbiguousResults) as cm:
|
||||
dbverifier._verify_with_reconciled_data(exists)
|
||||
exception = cm.exception
|
||||
self.assertEquals(exception.object_type, 'InstanceReconcile')
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_with_reconciled_data_instance_not_found(self):
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
deleted_at = decimal.Decimal('2.1')
|
||||
exists.launched_at = launched_at
|
||||
exists.deleted_at = deleted_at
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
|
||||
.AndReturn(results)
|
||||
results.count().AndReturn(0)
|
||||
self.mox.ReplayAll()
|
||||
with self.assertRaises(NotFound) as cm:
|
||||
dbverifier._verify_with_reconciled_data(exists)
|
||||
exception = cm.exception
|
||||
self.assertEquals(exception.object_type, 'InstanceReconcile')
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_with_reconciled_data_reconcile_not_found(self):
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
deleted_at = decimal.Decimal('2.1')
|
||||
exists.launched_at = launched_at
|
||||
exists.deleted_at = deleted_at
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
|
||||
.AndReturn(results)
|
||||
results.count().AndReturn(1)
|
||||
launched_min = decimal.Decimal('1')
|
||||
launched_max = decimal.Decimal('1.999999')
|
||||
filter = {
|
||||
'instance': INSTANCE_ID_1,
|
||||
'launched_at__gte': launched_min,
|
||||
'launched_at__lte': launched_max
|
||||
}
|
||||
recs = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.objects.filter(**filter).AndReturn(recs)
|
||||
recs.count().AndReturn(0)
|
||||
self.mox.ReplayAll()
|
||||
with self.assertRaises(NotFound) as cm:
|
||||
dbverifier._verify_with_reconciled_data(exists)
|
||||
exception = cm.exception
|
||||
self.assertEquals(exception.object_type, 'InstanceReconcile')
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_pass(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
@@ -448,7 +645,8 @@ class VerifierTestCase(unittest.TestCase):
|
||||
dbverifier._verify_for_delete(exist)
|
||||
dbverifier._mark_exist_verified(exist)
|
||||
self.mox.ReplayAll()
|
||||
dbverifier._verify(exist)
|
||||
result, exists = dbverifier._verify(exist)
|
||||
self.assertTrue(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_no_launched_at(self):
|
||||
@@ -460,8 +658,29 @@ class VerifierTestCase(unittest.TestCase):
|
||||
self.mox.StubOutWithMock(dbverifier, '_mark_exist_verified')
|
||||
dbverifier._mark_exist_failed(exist,
|
||||
reason="Exists without a launched_at")
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_with_reconciled_data')
|
||||
dbverifier._verify_with_reconciled_data(exist)\
|
||||
.AndRaise(NotFound('InstanceReconcile', {}))
|
||||
self.mox.ReplayAll()
|
||||
dbverifier._verify(exist)
|
||||
result, exists = dbverifier._verify(exist)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_fails_reconciled_verify_uses_second_exception(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_for_launch')
|
||||
ex1 = VerificationException('test1')
|
||||
dbverifier._verify_for_launch(exist).AndRaise(ex1)
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_for_delete')
|
||||
self.mox.StubOutWithMock(dbverifier, '_mark_exist_failed')
|
||||
self.mox.StubOutWithMock(dbverifier, '_mark_exist_verified')
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_with_reconciled_data')
|
||||
dbverifier._verify_with_reconciled_data(exist)\
|
||||
.AndRaise(VerificationException('test2'))
|
||||
dbverifier._mark_exist_failed(exist, reason='test2')
|
||||
self.mox.ReplayAll()
|
||||
result, exists = dbverifier._verify(exist)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_launch_fail(self):
|
||||
@@ -473,9 +692,48 @@ class VerifierTestCase(unittest.TestCase):
|
||||
self.mox.StubOutWithMock(dbverifier, '_mark_exist_verified')
|
||||
verify_exception = VerificationException('test')
|
||||
dbverifier._verify_for_launch(exist).AndRaise(verify_exception)
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_with_reconciled_data')
|
||||
dbverifier._verify_with_reconciled_data(exist)\
|
||||
.AndRaise(NotFound('InstanceReconcile', {}))
|
||||
dbverifier._mark_exist_failed(exist, reason='test')
|
||||
self.mox.ReplayAll()
|
||||
dbverifier._verify(exist)
|
||||
result, exists = dbverifier._verify(exist)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_fail_reconciled_verify_success(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_for_launch')
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_for_delete')
|
||||
self.mox.StubOutWithMock(dbverifier, '_mark_exist_failed')
|
||||
self.mox.StubOutWithMock(dbverifier, '_mark_exist_verified')
|
||||
verify_exception = VerificationException('test')
|
||||
dbverifier._verify_for_launch(exist).AndRaise(verify_exception)
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_with_reconciled_data')
|
||||
dbverifier._verify_with_reconciled_data(exist)
|
||||
dbverifier._mark_exist_verified(exist)
|
||||
self.mox.ReplayAll()
|
||||
result, exists = dbverifier._verify(exist)
|
||||
self.assertTrue(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_fail_with_reconciled_data_exception(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_for_launch')
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_for_delete')
|
||||
self.mox.StubOutWithMock(dbverifier, '_mark_exist_failed')
|
||||
self.mox.StubOutWithMock(dbverifier, '_mark_exist_verified')
|
||||
verify_exception = VerificationException('test')
|
||||
dbverifier._verify_for_launch(exist).AndRaise(verify_exception)
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_with_reconciled_data')
|
||||
dbverifier._verify_with_reconciled_data(exist)\
|
||||
.AndRaise(Exception())
|
||||
dbverifier._mark_exist_failed(exist, reason='Exception')
|
||||
self.mox.ReplayAll()
|
||||
result, exists = dbverifier._verify(exist)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_delete_fail(self):
|
||||
@@ -488,9 +746,13 @@ class VerifierTestCase(unittest.TestCase):
|
||||
verify_exception = VerificationException('test')
|
||||
dbverifier._verify_for_launch(exist)
|
||||
dbverifier._verify_for_delete(exist).AndRaise(verify_exception)
|
||||
self.mox.StubOutWithMock(dbverifier, '_verify_with_reconciled_data')
|
||||
dbverifier._verify_with_reconciled_data(exist)\
|
||||
.AndRaise(NotFound('InstanceReconcile', {}))
|
||||
dbverifier._mark_exist_failed(exist, reason='test')
|
||||
self.mox.ReplayAll()
|
||||
dbverifier._verify(exist)
|
||||
result, exists = dbverifier._verify(exist)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_exception_during_launch(self):
|
||||
@@ -503,7 +765,8 @@ class VerifierTestCase(unittest.TestCase):
|
||||
dbverifier._verify_for_launch(exist).AndRaise(Exception())
|
||||
dbverifier._mark_exist_failed(exist, reason='Exception')
|
||||
self.mox.ReplayAll()
|
||||
dbverifier._verify(exist)
|
||||
result, exists = dbverifier._verify(exist)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_exception_during_delete(self):
|
||||
@@ -517,11 +780,11 @@ class VerifierTestCase(unittest.TestCase):
|
||||
dbverifier._verify_for_delete(exist).AndRaise(Exception())
|
||||
dbverifier._mark_exist_failed(exist, reason='Exception')
|
||||
self.mox.ReplayAll()
|
||||
dbverifier._verify(exist)
|
||||
result, exists = dbverifier._verify(exist)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_range_without_callback(self):
|
||||
pool = self.mox.CreateMockAnything()
|
||||
when_max = datetime.datetime.utcnow()
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceExists.objects.select_related().AndReturn(results)
|
||||
@@ -540,17 +803,113 @@ class VerifierTestCase(unittest.TestCase):
|
||||
results.__iter__().AndReturn([exist1, exist2].__iter__())
|
||||
exist1.save()
|
||||
exist2.save()
|
||||
pool.apply_async(dbverifier._verify, args=(exist1,), callback=None)
|
||||
pool.apply_async(dbverifier._verify, args=(exist2,), callback=None)
|
||||
self.pool.apply_async(dbverifier._verify, args=(exist1,),
|
||||
callback=None)
|
||||
self.pool.apply_async(dbverifier._verify, args=(exist2,),
|
||||
callback=None)
|
||||
self.mox.ReplayAll()
|
||||
dbverifier.verify_for_range(pool, when_max)
|
||||
self.verifier.verify_for_range(when_max)
|
||||
self.assertEqual(exist1.status, 'verifying')
|
||||
self.assertEqual(exist2.status, 'verifying')
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_clean_results_full(self):
|
||||
self.verifier.reconcile = True
|
||||
result_not_ready = self.mox.CreateMockAnything()
|
||||
result_not_ready.ready().AndReturn(False)
|
||||
result_unsuccessful = self.mox.CreateMockAnything()
|
||||
result_unsuccessful.ready().AndReturn(True)
|
||||
result_unsuccessful.successful().AndReturn(False)
|
||||
result_successful = self.mox.CreateMockAnything()
|
||||
result_successful.ready().AndReturn(True)
|
||||
result_successful.successful().AndReturn(True)
|
||||
result_successful.get().AndReturn((True, None))
|
||||
result_failed_verification = self.mox.CreateMockAnything()
|
||||
result_failed_verification.ready().AndReturn(True)
|
||||
result_failed_verification.successful().AndReturn(True)
|
||||
failed_exists = self.mox.CreateMockAnything()
|
||||
result_failed_verification.get().AndReturn((False, failed_exists))
|
||||
self.verifier.results = [result_not_ready,
|
||||
result_unsuccessful,
|
||||
result_successful,
|
||||
result_failed_verification]
|
||||
self.mox.ReplayAll()
|
||||
(result_count, success_count, errored) = self.verifier.clean_results()
|
||||
self.assertEqual(result_count, 1)
|
||||
self.assertEqual(success_count, 2)
|
||||
self.assertEqual(errored, 1)
|
||||
self.assertEqual(len(self.verifier.results), 1)
|
||||
self.assertEqual(self.verifier.results[0], result_not_ready)
|
||||
self.assertEqual(len(self.verifier.failed), 1)
|
||||
self.assertEqual(self.verifier.failed[0], result_failed_verification)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_clean_results_pending(self):
|
||||
self.verifier.reconcile = True
|
||||
result_not_ready = self.mox.CreateMockAnything()
|
||||
result_not_ready.ready().AndReturn(False)
|
||||
self.verifier.results = [result_not_ready]
|
||||
self.mox.ReplayAll()
|
||||
(result_count, success_count, errored) = self.verifier.clean_results()
|
||||
self.assertEqual(result_count, 1)
|
||||
self.assertEqual(success_count, 0)
|
||||
self.assertEqual(errored, 0)
|
||||
self.assertEqual(len(self.verifier.results), 1)
|
||||
self.assertEqual(self.verifier.results[0], result_not_ready)
|
||||
self.assertEqual(len(self.verifier.failed), 0)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_clean_results_successful(self):
|
||||
self.verifier.reconcile = True
|
||||
result_successful = self.mox.CreateMockAnything()
|
||||
result_successful.ready().AndReturn(True)
|
||||
result_successful.successful().AndReturn(True)
|
||||
result_successful.get().AndReturn((True, None))
|
||||
self.verifier.results = [result_successful]
|
||||
self.mox.ReplayAll()
|
||||
(result_count, success_count, errored) = self.verifier.clean_results()
|
||||
self.assertEqual(result_count, 0)
|
||||
self.assertEqual(success_count, 1)
|
||||
self.assertEqual(errored, 0)
|
||||
self.assertEqual(len(self.verifier.results), 0)
|
||||
self.assertEqual(len(self.verifier.failed), 0)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_clean_results_unsuccessful(self):
|
||||
self.verifier.reconcile = True
|
||||
result_unsuccessful = self.mox.CreateMockAnything()
|
||||
result_unsuccessful.ready().AndReturn(True)
|
||||
result_unsuccessful.successful().AndReturn(False)
|
||||
self.verifier.results = [result_unsuccessful]
|
||||
self.mox.ReplayAll()
|
||||
(result_count, success_count, errored) = self.verifier.clean_results()
|
||||
self.assertEqual(result_count, 0)
|
||||
self.assertEqual(success_count, 0)
|
||||
self.assertEqual(errored, 1)
|
||||
self.assertEqual(len(self.verifier.results), 0)
|
||||
self.assertEqual(len(self.verifier.failed), 0)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_clean_results_fail_verification(self):
|
||||
self.verifier.reconcile = True
|
||||
result_failed_verification = self.mox.CreateMockAnything()
|
||||
result_failed_verification.ready().AndReturn(True)
|
||||
result_failed_verification.successful().AndReturn(True)
|
||||
failed_exists = self.mox.CreateMockAnything()
|
||||
result_failed_verification.get().AndReturn((False, failed_exists))
|
||||
self.verifier.results = [result_failed_verification]
|
||||
self.mox.ReplayAll()
|
||||
(result_count, success_count, errored) = self.verifier.clean_results()
|
||||
self.assertEqual(result_count, 0)
|
||||
self.assertEqual(success_count, 1)
|
||||
self.assertEqual(errored, 0)
|
||||
self.assertEqual(len(self.verifier.results), 0)
|
||||
self.assertEqual(len(self.verifier.failed), 1)
|
||||
self.assertEqual(self.verifier.failed[0], failed_exists)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_range_with_callback(self):
|
||||
callback = self.mox.CreateMockAnything()
|
||||
pool = self.mox.CreateMockAnything()
|
||||
when_max = datetime.datetime.utcnow()
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceExists.objects.select_related().AndReturn(results)
|
||||
@@ -569,14 +928,30 @@ class VerifierTestCase(unittest.TestCase):
|
||||
results.__iter__().AndReturn([exist1, exist2].__iter__())
|
||||
exist1.save()
|
||||
exist2.save()
|
||||
pool.apply_async(dbverifier._verify, args=(exist1,), callback=callback)
|
||||
pool.apply_async(dbverifier._verify, args=(exist2,), callback=callback)
|
||||
self.pool.apply_async(dbverifier._verify, args=(exist1,),
|
||||
callback=callback)
|
||||
self.pool.apply_async(dbverifier._verify, args=(exist2,),
|
||||
callback=callback)
|
||||
self.mox.ReplayAll()
|
||||
dbverifier.verify_for_range(pool, when_max, callback=callback)
|
||||
self.verifier.verify_for_range(when_max, callback=callback)
|
||||
self.assertEqual(exist1.status, 'verifying')
|
||||
self.assertEqual(exist2.status, 'verifying')
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_reconcile_failed(self):
|
||||
self.verifier.reconcile = True
|
||||
exists1 = self.mox.CreateMockAnything()
|
||||
exists2 = self.mox.CreateMockAnything()
|
||||
self.verifier.failed = [exists1, exists2]
|
||||
self.reconciler.failed_validation(exists1).AndReturn(True)
|
||||
self.reconciler.failed_validation(exists2).AndReturn(False)
|
||||
self.mox.StubOutWithMock(dbverifier, '_mark_exist_verified')
|
||||
dbverifier._mark_exist_verified(exists1, reconciled=True)
|
||||
self.mox.ReplayAll()
|
||||
self.verifier.reconcile_failed()
|
||||
self.assertEqual(len(self.verifier.failed), 0)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_send_verified_notification_default_routing_key(self):
|
||||
connection = self.mox.CreateMockAnything()
|
||||
exchange = self.mox.CreateMockAnything()
|
||||
@@ -649,140 +1024,193 @@ class VerifierTestCase(unittest.TestCase):
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_run_notifications(self):
|
||||
config = {
|
||||
"tick_time": 30,
|
||||
"settle_time": 5,
|
||||
"settle_units": "minutes",
|
||||
"pool_size": 2,
|
||||
"enable_notifications": True,
|
||||
"rabbit": {
|
||||
"durable_queue": False,
|
||||
"host": "10.0.0.1",
|
||||
"port": 5672,
|
||||
"userid": "rabbit",
|
||||
"password": "rabbit",
|
||||
"virtual_host": "/",
|
||||
"exchange_name": "stacktach"
|
||||
}
|
||||
}
|
||||
self.mox.StubOutWithMock(multiprocessing, 'Pool')
|
||||
pool = self.mox.CreateMockAnything()
|
||||
multiprocessing.Pool(2).AndReturn(pool)
|
||||
self.mox.StubOutWithMock(dbverifier, '_create_exchange')
|
||||
exchange = self.mox.CreateMockAnything()
|
||||
dbverifier._create_exchange('stacktach', 'topic', durable=False)\
|
||||
.AndReturn(exchange)
|
||||
self.mox.StubOutWithMock(dbverifier, '_create_connection')
|
||||
conn = self.mox.CreateMockAnything()
|
||||
dbverifier._create_connection(config).AndReturn(conn)
|
||||
dbverifier._create_connection(self.config_notif).AndReturn(conn)
|
||||
conn.__enter__().AndReturn(conn)
|
||||
self.mox.StubOutWithMock(dbverifier, '_run')
|
||||
dbverifier._run(config, pool, callback=mox.IgnoreArg())
|
||||
self.mox.StubOutWithMock(self.verifier_notif, '_run')
|
||||
self.verifier_notif._run(callback=mox.Not(mox.Is(None)))
|
||||
conn.__exit__(None, None, None)
|
||||
self.mox.ReplayAll()
|
||||
dbverifier.run(config)
|
||||
self.verifier_notif.run()
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_run_notifications_with_routing_keys(self):
|
||||
config = {
|
||||
"tick_time": 30,
|
||||
"settle_time": 5,
|
||||
"settle_units": "minutes",
|
||||
"pool_size": 2,
|
||||
"enable_notifications": True,
|
||||
"rabbit": {
|
||||
"durable_queue": False,
|
||||
"host": "10.0.0.1",
|
||||
"port": 5672,
|
||||
"userid": "rabbit",
|
||||
"password": "rabbit",
|
||||
"virtual_host": "/",
|
||||
"exchange_name": "stacktach",
|
||||
}
|
||||
}
|
||||
self.mox.StubOutWithMock(multiprocessing, 'Pool')
|
||||
pool = self.mox.CreateMockAnything()
|
||||
multiprocessing.Pool(2).AndReturn(pool)
|
||||
self.mox.StubOutWithMock(dbverifier, '_create_exchange')
|
||||
exchange = self.mox.CreateMockAnything()
|
||||
dbverifier._create_exchange('stacktach', 'topic', durable=False) \
|
||||
.AndReturn(exchange)
|
||||
self.mox.StubOutWithMock(dbverifier, '_create_connection')
|
||||
conn = self.mox.CreateMockAnything()
|
||||
dbverifier._create_connection(config).AndReturn(conn)
|
||||
dbverifier._create_connection(self.config_notif).AndReturn(conn)
|
||||
conn.__enter__().AndReturn(conn)
|
||||
self.mox.StubOutWithMock(dbverifier, '_run')
|
||||
dbverifier._run(config, pool, callback=mox.IgnoreArg())
|
||||
self.mox.StubOutWithMock(self.verifier_notif, '_run')
|
||||
self.verifier_notif._run(callback=mox.Not(mox.Is(None)))
|
||||
conn.__exit__(None, None, None)
|
||||
self.mox.ReplayAll()
|
||||
dbverifier.run(config)
|
||||
self.verifier_notif.run()
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_run_no_notifications(self):
|
||||
config = {
|
||||
"tick_time": 30,
|
||||
"settle_time": 5,
|
||||
"settle_units": "minutes",
|
||||
"pool_size": 2,
|
||||
"enable_notifications": False,
|
||||
}
|
||||
self.mox.StubOutWithMock(multiprocessing, 'Pool')
|
||||
pool = self.mox.CreateMockAnything()
|
||||
multiprocessing.Pool(2).AndReturn(pool)
|
||||
self.mox.StubOutWithMock(dbverifier, '_run')
|
||||
dbverifier._run(config, pool)
|
||||
self.mox.StubOutWithMock(self.verifier, '_run')
|
||||
self.verifier._run()
|
||||
self.mox.ReplayAll()
|
||||
dbverifier.run(config)
|
||||
self.verifier.run()
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_run_once_notifications(self):
|
||||
config = {
|
||||
"tick_time": 30,
|
||||
"settle_time": 5,
|
||||
"settle_units": "minutes",
|
||||
"pool_size": 2,
|
||||
"enable_notifications": True,
|
||||
"rabbit": {
|
||||
"durable_queue": False,
|
||||
"host": "10.0.0.1",
|
||||
"port": 5672,
|
||||
"userid": "rabbit",
|
||||
"password": "rabbit",
|
||||
"virtual_host": "/",
|
||||
"exchange_name": "stacktach"
|
||||
}
|
||||
}
|
||||
self.mox.StubOutWithMock(multiprocessing, 'Pool')
|
||||
pool = self.mox.CreateMockAnything()
|
||||
multiprocessing.Pool(2).AndReturn(pool)
|
||||
self.mox.StubOutWithMock(dbverifier, '_create_exchange')
|
||||
exchange = self.mox.CreateMockAnything()
|
||||
dbverifier._create_exchange('stacktach', 'topic', durable=False) \
|
||||
.AndReturn(exchange)
|
||||
self.mox.StubOutWithMock(dbverifier, '_create_connection')
|
||||
conn = self.mox.CreateMockAnything()
|
||||
dbverifier._create_connection(config).AndReturn(conn)
|
||||
dbverifier._create_connection(self.config_notif).AndReturn(conn)
|
||||
conn.__enter__().AndReturn(conn)
|
||||
self.mox.StubOutWithMock(dbverifier, '_run_once')
|
||||
dbverifier._run_once(config, pool, callback=mox.IgnoreArg())
|
||||
self.mox.StubOutWithMock(self.verifier_notif, '_run_once')
|
||||
self.verifier_notif._run_once(callback=mox.Not(mox.Is(None)))
|
||||
conn.__exit__(None, None, None)
|
||||
self.mox.ReplayAll()
|
||||
dbverifier.run_once(config)
|
||||
self.verifier_notif.run_once()
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_run_once_no_notifications(self):
|
||||
config = {
|
||||
"tick_time": 30,
|
||||
"settle_time": 5,
|
||||
"settle_units": "minutes",
|
||||
"pool_size": 2,
|
||||
"enable_notifications": False,
|
||||
}
|
||||
self.mox.StubOutWithMock(multiprocessing, 'Pool')
|
||||
pool = self.mox.CreateMockAnything()
|
||||
multiprocessing.Pool(2).AndReturn(pool)
|
||||
self.mox.StubOutWithMock(dbverifier, '_run_once')
|
||||
dbverifier._run_once(config, pool)
|
||||
self.mox.StubOutWithMock(self.verifier, '_run_once')
|
||||
self.verifier._run_once()
|
||||
self.mox.ReplayAll()
|
||||
dbverifier.run_once(config)
|
||||
self.verifier.run_once()
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_run_full_no_notifications(self):
|
||||
self.verifier.reconcile = True
|
||||
self.mox.StubOutWithMock(self.verifier, '_keep_running')
|
||||
self.verifier._keep_running().AndReturn(True)
|
||||
self.mox.StubOutWithMock(transaction, 'commit_on_success')
|
||||
fake_transaction = self.mox.CreateMockAnything()
|
||||
transaction.commit_on_success().AndReturn(fake_transaction)
|
||||
fake_transaction.__enter__()
|
||||
start = datetime.datetime.utcnow()
|
||||
self.mox.StubOutWithMock(self.verifier, '_utcnow')
|
||||
self.verifier._utcnow().AndReturn(start)
|
||||
settle_time = self.config['settle_time']
|
||||
settle_units = self.config['settle_units']
|
||||
settle_offset = {settle_units: settle_time}
|
||||
ending_max = start - datetime.timedelta(**settle_offset)
|
||||
self.mox.StubOutWithMock(self.verifier, 'verify_for_range')
|
||||
self.verifier.verify_for_range(ending_max, callback=None)
|
||||
self.mox.StubOutWithMock(self.verifier, 'reconcile_failed')
|
||||
result1 = self.mox.CreateMockAnything()
|
||||
result2 = self.mox.CreateMockAnything()
|
||||
self.verifier.results = [result1, result2]
|
||||
result1.ready().AndReturn(True)
|
||||
result1.successful().AndReturn(True)
|
||||
result1.get().AndReturn((True, None))
|
||||
result2.ready().AndReturn(True)
|
||||
result2.successful().AndReturn(True)
|
||||
result2.get().AndReturn((True, None))
|
||||
self.verifier.reconcile_failed()
|
||||
fake_transaction.__exit__(None, None, None)
|
||||
self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
|
||||
time.sleep(self.config['tick_time'])
|
||||
self.verifier._keep_running().AndReturn(False)
|
||||
self.mox.ReplayAll()
|
||||
self.verifier.run()
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_run_full(self):
|
||||
self.verifier_notif.reconcile = True
|
||||
self.mox.StubOutWithMock(self.verifier_notif, '_keep_running')
|
||||
self.verifier_notif._keep_running().AndReturn(True)
|
||||
self.mox.StubOutWithMock(transaction, 'commit_on_success')
|
||||
fake_transaction = self.mox.CreateMockAnything()
|
||||
transaction.commit_on_success().AndReturn(fake_transaction)
|
||||
fake_transaction.__enter__()
|
||||
start = datetime.datetime.utcnow()
|
||||
self.mox.StubOutWithMock(self.verifier_notif, '_utcnow')
|
||||
self.verifier_notif._utcnow().AndReturn(start)
|
||||
settle_time = self.config['settle_time']
|
||||
settle_units = self.config['settle_units']
|
||||
settle_offset = {settle_units: settle_time}
|
||||
ending_max = start - datetime.timedelta(**settle_offset)
|
||||
self.mox.StubOutWithMock(self.verifier_notif, 'verify_for_range')
|
||||
self.verifier_notif.verify_for_range(ending_max,
|
||||
callback=mox.Not(mox.Is(None)))
|
||||
self.mox.StubOutWithMock(self.verifier_notif, 'reconcile_failed')
|
||||
result1 = self.mox.CreateMockAnything()
|
||||
result2 = self.mox.CreateMockAnything()
|
||||
self.verifier_notif.results = [result1, result2]
|
||||
result1.ready().AndReturn(True)
|
||||
result1.successful().AndReturn(True)
|
||||
result1.get().AndReturn((True, None))
|
||||
result2.ready().AndReturn(True)
|
||||
result2.successful().AndReturn(True)
|
||||
result2.get().AndReturn((True, None))
|
||||
self.verifier_notif.reconcile_failed()
|
||||
fake_transaction.__exit__(None, None, None)
|
||||
self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
|
||||
time.sleep(self.config['tick_time'])
|
||||
self.verifier_notif._keep_running().AndReturn(False)
|
||||
self.mox.ReplayAll()
|
||||
self.verifier_notif.run()
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_run_once_full_no_notifications(self):
|
||||
self.verifier.reconcile = True
|
||||
start = datetime.datetime.utcnow()
|
||||
self.mox.StubOutWithMock(self.verifier, '_utcnow')
|
||||
self.verifier._utcnow().AndReturn(start)
|
||||
settle_time = self.config['settle_time']
|
||||
settle_units = self.config['settle_units']
|
||||
settle_offset = {settle_units: settle_time}
|
||||
ending_max = start - datetime.timedelta(**settle_offset)
|
||||
self.mox.StubOutWithMock(self.verifier, 'verify_for_range')
|
||||
self.verifier.verify_for_range(ending_max, callback=None)
|
||||
result1 = self.mox.CreateMockAnything()
|
||||
result2 = self.mox.CreateMockAnything()
|
||||
self.verifier.results = [result1, result2]
|
||||
result1.ready().AndReturn(True)
|
||||
result1.successful().AndReturn(True)
|
||||
result1.get().AndReturn((True, None))
|
||||
result2.ready().AndReturn(True)
|
||||
result2.successful().AndReturn(True)
|
||||
result2.get().AndReturn((True, None))
|
||||
self.mox.StubOutWithMock(self.verifier, 'reconcile_failed')
|
||||
self.verifier.reconcile_failed()
|
||||
self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
|
||||
time.sleep(self.config['tick_time'])
|
||||
self.mox.ReplayAll()
|
||||
self.verifier.run_once()
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_run_once_full(self):
|
||||
self.verifier_notif.reconcile = True
|
||||
start = datetime.datetime.utcnow()
|
||||
self.mox.StubOutWithMock(self.verifier_notif, '_utcnow')
|
||||
self.verifier_notif._utcnow().AndReturn(start)
|
||||
settle_time = self.config['settle_time']
|
||||
settle_units = self.config['settle_units']
|
||||
settle_offset = {settle_units: settle_time}
|
||||
ending_max = start - datetime.timedelta(**settle_offset)
|
||||
self.mox.StubOutWithMock(self.verifier_notif, 'verify_for_range')
|
||||
self.verifier_notif.verify_for_range(ending_max,
|
||||
callback=mox.Not(mox.Is(None)))
|
||||
result1 = self.mox.CreateMockAnything()
|
||||
result2 = self.mox.CreateMockAnything()
|
||||
self.verifier_notif.results = [result1, result2]
|
||||
result1.ready().AndReturn(True)
|
||||
result1.successful().AndReturn(True)
|
||||
result1.get().AndReturn((True, None))
|
||||
result2.ready().AndReturn(True)
|
||||
result2.successful().AndReturn(True)
|
||||
result2.get().AndReturn((True, None))
|
||||
self.mox.StubOutWithMock(self.verifier_notif, 'reconcile_failed')
|
||||
self.verifier_notif.reconcile_failed()
|
||||
self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
|
||||
time.sleep(self.config['tick_time'])
|
||||
self.mox.ReplayAll()
|
||||
self.verifier_notif.run_once()
|
||||
self.mox.VerifyAll()
|
||||
|
||||
@@ -23,7 +23,7 @@ import datetime
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from time import sleep
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from django.db import transaction
|
||||
@@ -44,6 +44,7 @@ LOG = stacklog.get_logger()
|
||||
|
||||
from stacktach import models
|
||||
from stacktach import datetime_to_decimal as dt
|
||||
from stacktach import reconciler
|
||||
from verifier import AmbiguousResults
|
||||
from verifier import FieldMismatch
|
||||
from verifier import NotFound
|
||||
@@ -69,6 +70,15 @@ def _find_launch(instance, launched):
|
||||
return models.InstanceUsage.objects.filter(**params)
|
||||
|
||||
|
||||
def _find_reconcile(instance, launched):
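# Same one-second launched_at window as _find_launch(), but searched
# against InstanceReconcile rows.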
|
||||
start = launched - datetime.timedelta(microseconds=launched.microsecond)
|
||||
end = start + datetime.timedelta(microseconds=999999)
|
||||
params = {'instance': instance,
|
||||
'launched_at__gte': dt.dt_to_decimal(start),
|
||||
'launched_at__lte': dt.dt_to_decimal(end)}
|
||||
return models.InstanceReconcile.objects.filter(**params)
|
||||
|
||||
|
||||
def _find_delete(instance, launched, deleted_max=None):
|
||||
start = launched - datetime.timedelta(microseconds=launched.microsecond)
|
||||
end = start + datetime.timedelta(microseconds=999999)
|
||||
@@ -80,8 +90,16 @@ def _find_delete(instance, launched, deleted_max=None):
|
||||
return models.InstanceDeletes.objects.filter(**params)
|
||||
|
||||
|
||||
def _mark_exist_verified(exist):
|
||||
def _mark_exist_verified(exist,
|
||||
reconciled=False,
|
||||
reason=None):
|
||||
if not reconciled:
|
||||
exist.status = models.InstanceExists.VERIFIED
|
||||
else:
|
||||
exist.status = models.InstanceExists.RECONCILED
|
||||
if reason is not None:
|
||||
exist.fail_reason = reason
|
||||
|
||||
exist.save()
|
||||
|
||||
|
||||
@@ -152,10 +170,11 @@ def _verify_field_mismatch(exists, launch):
|
||||
launch.os_distro)
|
||||
|
||||
|
||||
def _verify_for_launch(exist):
|
||||
if exist.usage:
|
||||
def _verify_for_launch(exist, launch=None, launch_type="InstanceUsage"):
|
||||
|
||||
if not launch and exist.usage:
|
||||
launch = exist.usage
|
||||
else:
|
||||
elif not launch:
|
||||
if models.InstanceUsage.objects\
|
||||
.filter(instance=exist.instance).count() > 0:
|
||||
launches = _find_launch(exist.instance,
|
||||
@@ -166,23 +185,22 @@ def _verify_for_launch(exist):
|
||||
'launched_at': exist.launched_at
|
||||
}
|
||||
if count > 1:
|
||||
raise AmbiguousResults('InstanceUsage', query)
|
||||
raise AmbiguousResults(launch_type, query)
|
||||
elif count == 0:
|
||||
raise NotFound('InstanceUsage', query)
|
||||
raise NotFound(launch_type, query)
|
||||
launch = launches[0]
|
||||
else:
|
||||
raise NotFound('InstanceUsage', {'instance': exist.instance})
|
||||
raise NotFound(launch_type, {'instance': exist.instance})
|
||||
|
||||
_verify_field_mismatch(exist, launch)
|
||||
|
||||
|
||||
def _verify_for_delete(exist):
|
||||
def _verify_for_delete(exist, delete=None, delete_type="InstanceDelete"):
|
||||
|
||||
delete = None
|
||||
if exist.delete:
|
||||
if not delete and exist.delete:
|
||||
# We know we have a delete and we have its id
|
||||
delete = exist.delete
|
||||
else:
|
||||
elif not delete:
|
||||
if exist.deleted_at:
|
||||
# We received this exists before the delete, go find it
|
||||
deletes = _find_delete(exist.instance,
|
||||
@@ -194,7 +212,7 @@ def _verify_for_delete(exist):
|
||||
'instance': exist.instance,
|
||||
'launched_at': exist.launched_at
|
||||
}
|
||||
raise NotFound('InstanceDelete', query)
|
||||
raise NotFound(delete_type, query)
|
||||
else:
|
||||
# We don't know if this is supposed to have a delete or not.
|
||||
# Thus, we need to check if we have a delete for this instance.
|
||||
@@ -206,7 +224,7 @@ def _verify_for_delete(exist):
|
||||
deleted_at_max = dt.dt_from_decimal(exist.audit_period_ending)
|
||||
deletes = _find_delete(exist.instance, launched_at, deleted_at_max)
|
||||
if deletes.count() > 0:
|
||||
reason = 'Found InstanceDeletes for non-delete exist'
|
||||
reason = 'Found %ss for non-delete exist' % delete_type
|
||||
raise VerificationException(reason)
|
||||
|
||||
if delete:
|
||||
@@ -221,6 +239,54 @@ def _verify_for_delete(exist):
|
||||
delete.deleted_at)
|
||||
|
||||
|
||||
def _verify_with_reconciled_data(exist):
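# Fallback path: re-run the launch/delete checks against an
# InstanceReconcile row when verification against the usage data failed.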
|
||||
if not exist.launched_at:
|
||||
raise VerificationException("Exists without a launched_at")
|
||||
|
||||
query = models.InstanceReconcile.objects.filter(instance=exist.instance)
|
||||
if query.count() > 0:
|
||||
recs = _find_reconcile(exist.instance,
|
||||
dt.dt_from_decimal(exist.launched_at))
|
||||
search_query = {'instance': exist.instance,
|
||||
'launched_at': exist.launched_at}
|
||||
count = recs.count()
|
||||
if count > 1:
|
||||
raise AmbiguousResults('InstanceReconcile', search_query)
|
||||
elif count == 0:
|
||||
raise NotFound('InstanceReconcile', search_query)
|
||||
reconcile = recs[0]
|
||||
else:
|
||||
raise NotFound('InstanceReconcile', {'instance': exist.instance})
|
||||
|
||||
_verify_for_launch(exist, launch=reconcile,
|
||||
launch_type="InstanceReconcile")
|
||||
delete = None
|
||||
if reconcile.deleted_at is not None:
|
||||
delete = reconcile
|
||||
_verify_for_delete(exist, delete=delete,
|
||||
delete_type="InstanceReconcile")
|
||||
|
||||
|
||||
def _attempt_reconciled_verify(exist, orig_e):
|
||||
verified = False
|
||||
try:
|
||||
# Attempt to verify against reconciled data
|
||||
_verify_with_reconciled_data(exist)
|
||||
verified = True
|
||||
_mark_exist_verified(exist)
|
||||
except NotFound, rec_e:
|
||||
# No reconciled data, just mark it failed
|
||||
_mark_exist_failed(exist, reason=str(orig_e))
|
||||
except VerificationException, rec_e:
|
||||
# Verification failed against reconciled data, mark it failed
|
||||
# using the second failure.
|
||||
_mark_exist_failed(exist, reason=str(rec_e))
|
||||
except Exception, rec_e:
|
||||
_mark_exist_failed(exist, reason=rec_e.__class__.__name__)
|
||||
LOG.exception(rec_e)
|
||||
return verified
|
||||
|
||||
|
||||
def _verify(exist):
    verified = False
    try:
@@ -232,8 +298,9 @@ def _verify(exist):

        verified = True
        _mark_exist_verified(exist)
    except VerificationException, e:
        _mark_exist_failed(exist, reason=str(e))
    except VerificationException, orig_e:
        # Something is wrong with the InstanceUsage record
        verified = _attempt_reconciled_verify(exist, orig_e)
    except Exception, e:
        _mark_exist_failed(exist, reason=e.__class__.__name__)
        LOG.exception(e)
@@ -241,54 +308,6 @@ def _verify(exist):
    return verified, exist


results = []


def verify_for_range(pool, ending_max, callback=None):
    exists = _list_exists(ending_max=ending_max,
                          status=models.InstanceExists.PENDING)
    count = exists.count()
    added = 0
    update_interval = datetime.timedelta(seconds=30)
    next_update = datetime.datetime.utcnow() + update_interval
    LOG.info("Adding %s exists to queue." % count)
    while added < count:
        for exist in exists[0:1000]:
            exist.status = models.InstanceExists.VERIFYING
            exist.save()
            result = pool.apply_async(_verify, args=(exist,),
                                      callback=callback)
            results.append(result)
            added += 1
            if datetime.datetime.utcnow() > next_update:
                values = ((added,) + clean_results())
                msg = "N: %s, P: %s, S: %s, E: %s" % values
                LOG.info(msg)
                next_update = datetime.datetime.utcnow() + update_interval

    return count


def clean_results():
    global results

    pending = []
    finished = 0
    successful = 0

    for result in results:
        if result.ready():
            finished += 1
            if result.successful():
                successful += 1
        else:
            pending.append(result)

    results = pending
    errored = finished - successful
    return len(results), successful, errored


def _send_notification(message, routing_key, connection, exchange):
    with kombu.pools.producers[connection].acquire(block=True) as producer:
        kombu.common.maybe_declare(exchange, producer.channel)
@@ -325,81 +344,155 @@ def _create_connection(config):
    return kombu.connection.BrokerConnection(**conn_params)


def _run(config, pool, callback=None):
    tick_time = config['tick_time']
    settle_units = config['settle_units']
    settle_time = config['settle_time']
    while True:
class Verifier(object):

    def __init__(self, config, pool=None, rec=None):
        self.config = config
        self.pool = pool or multiprocessing.Pool(self.config['pool_size'])
        self.reconcile = self.config.get('reconcile', False)
        self.reconciler = self._load_reconciler(config, rec=rec)
        self.results = []
        self.failed = []

    def _load_reconciler(self, config, rec=None):
        if rec:
            return rec

        if self.reconcile:
            config_loc = config.get('reconciler_config',
                                    '/etc/stacktach/reconciler_config.json')
            with open(config_loc, 'r') as rec_config_file:
                rec_config = json.load(rec_config_file)
                return reconciler.Reconciler(rec_config)

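    # A minimal sketch, not part of this change, of a config that turns
    # reconciliation on; the keys shown are the ones __init__/_load_reconciler
    # and the run loop read, and the values are placeholders:
    #
    #     config = {"pool_size": 2,
    #               "tick_time": 30,
    #               "settle_time": 5,
    #               "settle_units": "minutes",
    #               "enable_notifications": False,
    #               "reconcile": True,
    #               "reconciler_config":
    #                   "/etc/stacktach/reconciler_config.json"}
    #     verifier = Verifier(config)
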
    def clean_results(self):
        pending = []
        finished = 0
        successful = 0

        for result in self.results:
            if result.ready():
                finished += 1
                if result.successful():
                    (verified, exists) = result.get()
                    if self.reconcile and not verified:
                        self.failed.append(exists)
                    successful += 1
            else:
                pending.append(result)

        self.results = pending
        errored = finished - successful
        return len(self.results), successful, errored

    def verify_for_range(self, ending_max, callback=None):
        exists = _list_exists(ending_max=ending_max,
                              status=models.InstanceExists.PENDING)
        count = exists.count()
        added = 0
        update_interval = datetime.timedelta(seconds=30)
        next_update = datetime.datetime.utcnow() + update_interval
        LOG.info("Adding %s exists to queue." % count)
        while added < count:
            for exist in exists[0:1000]:
                exist.status = models.InstanceExists.VERIFYING
                exist.save()
                result = self.pool.apply_async(_verify, args=(exist,),
                                               callback=callback)
                self.results.append(result)
                added += 1
                if datetime.datetime.utcnow() > next_update:
                    values = ((added,) + self.clean_results())
                    msg = "N: %s, P: %s, S: %s, E: %s" % values
                    LOG.info(msg)
                    next_update = datetime.datetime.utcnow() + update_interval
        return count

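    # The progress line logged in verify_for_range() above packs four
    # counters: N is how many exists have been queued so far, and the tuple
    # from clean_results() supplies P, S and E -- results still pending,
    # results that completed successfully, and results that errored
    # (finished minus successful).
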
    def reconcile_failed(self):
        for failed_exist in self.failed:
            if self.reconciler.failed_validation(failed_exist):
                _mark_exist_verified(failed_exist, reconciled=True)
        self.failed = []

    def _keep_running(self):
        return True

    def _utcnow(self):
        return datetime.datetime.utcnow()

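    # _keep_running() and _utcnow() look redundant, but they are seams for
    # tests: a subclass can pin "now" and stop the loop after a single pass.
    # A hypothetical sketch (the subclass below is illustrative only):
    #
    #     class OneShotVerifier(Verifier):
    #         def __init__(self, config, pool=None, rec=None):
    #             super(OneShotVerifier, self).__init__(config, pool, rec)
    #             self._ran = False
    #
    #         def _keep_running(self):
    #             ran, self._ran = self._ran, True
    #             return not ran
    #
    #         def _utcnow(self):
    #             return datetime.datetime(2013, 1, 1)
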
    def _run(self, callback=None):
        tick_time = self.config['tick_time']
        settle_units = self.config['settle_units']
        settle_time = self.config['settle_time']
        while self._keep_running():
            with transaction.commit_on_success():
                now = datetime.datetime.utcnow()
                now = self._utcnow()
                kwargs = {settle_units: settle_time}
                ending_max = now - datetime.timedelta(**kwargs)
                new = verify_for_range(pool, ending_max, callback=callback)

                msg = "N: %s, P: %s, S: %s, E: %s" % ((new,) + clean_results())
                new = self.verify_for_range(ending_max,
                                            callback=callback)
                values = ((new,) + self.clean_results())
                if self.reconcile:
                    self.reconcile_failed()
                msg = "N: %s, P: %s, S: %s, E: %s" % values
            LOG.info(msg)
            sleep(tick_time)
            time.sleep(tick_time)


def run(config):
    pool = multiprocessing.Pool(config['pool_size'])

    if config['enable_notifications']:
        exchange = _create_exchange(config['rabbit']['exchange_name'],
    def run(self):
        if self.config['enable_notifications']:
            exchange = _create_exchange(self.config['rabbit']['exchange_name'],
                                        'topic',
                                    durable=config['rabbit']['durable_queue'])
                                        durable=self.config['rabbit']['durable_queue'])
            routing_keys = None
        if config['rabbit'].get('routing_keys') is not None:
            routing_keys = config['rabbit']['routing_keys']
            if self.config['rabbit'].get('routing_keys') is not None:
                routing_keys = self.config['rabbit']['routing_keys']

        with _create_connection(config) as conn:
            with _create_connection(self.config) as conn:
                def callback(result):
                    (verified, exist) = result
                    if verified:
                        send_verified_notification(exist, conn, exchange,
                                                   routing_keys=routing_keys)

            _run(config, pool, callback=callback)
                self._run(callback=callback)
        else:
            _run(config, pool)
            self._run()


def _run_once(config, pool, callback=None):
    tick_time = config['tick_time']
    settle_units = config['settle_units']
    settle_time = config['settle_time']
    now = datetime.datetime.utcnow()
    def _run_once(self, callback=None):
        tick_time = self.config['tick_time']
        settle_units = self.config['settle_units']
        settle_time = self.config['settle_time']
        now = self._utcnow()
        kwargs = {settle_units: settle_time}
        ending_max = now - datetime.timedelta(**kwargs)
    new = verify_for_range(pool, ending_max, callback=callback)
        new = self.verify_for_range(ending_max, callback=callback)

        LOG.info("Verifying %s exist events" % new)
    while len(results) > 0:
        LOG.info("P: %s, F: %s, E: %s" % clean_results())
        sleep(tick_time)
        while len(self.results) > 0:
            LOG.info("P: %s, F: %s, E: %s" % self.clean_results())
            if self.reconcile:
                self.reconcile_failed()
            time.sleep(tick_time)


def run_once(config):
    pool = multiprocessing.Pool(config['pool_size'])

    if config['enable_notifications']:
        exchange = _create_exchange(config['rabbit']['exchange_name'],
    def run_once(self):
        if self.config['enable_notifications']:
            exchange = _create_exchange(self.config['rabbit']['exchange_name'],
                                        'topic',
                                    durable=config['rabbit']['durable_queue'])
                                        durable=self.config['rabbit']['durable_queue'])
            routing_keys = None
        if config['rabbit'].get('routing_keys') is not None:
            routing_keys = config['rabbit']['routing_keys']
            if self.config['rabbit'].get('routing_keys') is not None:
                routing_keys = self.config['rabbit']['routing_keys']

        with _create_connection(config) as conn:
            with _create_connection(self.config) as conn:
                def callback(result):
                    (verified, exist) = result
                    if verified:
                        send_verified_notification(exist, conn, exchange,
                                                   routing_keys=routing_keys)

            _run_once(config, pool, callback=callback)
                self._run_once(callback=callback)
        else:
            _run_once(config, pool)
            self._run_once()


if __name__ == '__main__':
@@ -429,7 +522,8 @@ if __name__ == '__main__':
    config = {'tick_time': args.tick_time, 'settle_time': args.settle_time,
              'settle_units': args.settle_units, 'pool_size': args.pool_size}

    verifier = Verifier(config)
    if args.run_once:
        run_once(config)
        verifier.run_once()
    else:
        run(config)
        verifier.run()

@@ -59,7 +59,13 @@ if __name__ == '__main__':
    with open(config_filename, "r") as f:
        config = json.load(f)

    process = Process(target=dbverifier.run, args=(config, ))
    def make_and_start_verifier(config):
        # Gotta create it and run it this way so things don't get
        # lost when the process is forked.
        verifier = dbverifier.Verifier(config)
        verifier.run()

    process = Process(target=make_and_start_verifier, args=(config,))
    process.start()
    signal.signal(signal.SIGINT, kill_time)
    signal.signal(signal.SIGTERM, kill_time)

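    # Likely rationale for the wrapper above, beyond the in-code comment: the
    # Verifier, and the multiprocessing.Pool it builds in __init__, get
    # created inside the child process after the fork instead of being
    # constructed in the parent and inherited in a partially shared state.
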
@@ -33,7 +33,7 @@ case "$1" in
    /sbin/start-stop-daemon --start --pidfile $PIDFILE --make-pidfile -b --exec $DAEMON $ARGS
    ;;
  status)
    status_of_proc "$DAEMON" "verifier" && exit 0 || exit $?
    status_of_proc -p "${PIDFILE}" "$DAEMON" "verifier" && exit 0 || exit $?
    ;;
  *)
    echo "Usage: verifier.sh {start|stop|restart|status}"

@@ -33,7 +33,7 @@ case "$1" in
    /sbin/start-stop-daemon --start --pidfile $PIDFILE --make-pidfile -b --exec $DAEMON $ARGS
    ;;
  status)
    status_of_proc "$DAEMON" "stacktach" && exit 0 || exit $?
    status_of_proc -p "${PIDFILE}" "$DAEMON" "stacktach" && exit 0 || exit $?
    ;;
  *)
    echo "Usage: stacktach.sh {start|stop|restart|status}"