Merge "Add test data generator via oslo messaging"

Jenkins 2015-01-06 22:15:12 +00:00 committed by Gerrit Code Review
commit 24a9831c64
2 changed files with 169 additions and 31 deletions

tools/make_test_data.py

@@ -23,7 +23,8 @@ Usage:
Generate testing data, e.g. for the default time span
source .tox/py27/bin/activate
./tools/make_test_data.py --user 1 --project 1 1 cpu_util 20
./tools/make_test_data.py --user 1 --project 1 --resource 1 --counter cpu_util
--volume 20
"""
from __future__ import print_function
@@ -32,6 +33,7 @@ import datetime
import logging
import random
import sys
import uuid
from oslo.config import cfg
from oslo.utils import timeutils
@@ -41,10 +43,10 @@ from ceilometer import sample
from ceilometer import storage
def make_test_data(conn, name, meter_type, unit, volume, random_min,
def make_test_data(name, meter_type, unit, volume, random_min,
random_max, user_id, project_id, resource_id, start,
end, interval, resource_metadata={}, source='artificial',):
end, interval, resource_metadata=None, source='artificial'):
resource_metadata = resource_metadata or {}
# Compute start and end timestamps for the new data.
if isinstance(start, datetime.datetime):
timestamp = start
@@ -83,7 +85,8 @@ def make_test_data(conn, name, meter_type, unit, volume, random_min,
data = utils.meter_message_from_counter(
c,
cfg.CONF.publisher.metering_secret)
conn.record_metering_data(data)
yield data
n += 1
timestamp = timestamp + increment
@@ -96,9 +99,12 @@ def make_test_data(conn, name, meter_type, unit, volume, random_min,
print('Added %d new events for meter %s.' % (n, name))
def main():
cfg.CONF([], project='ceilometer')
def record_test_data(conn, *args, **kwargs):
for data in make_test_data(*args, **kwargs):
conn.record_metering_data(data)
def get_parser():
parser = argparse.ArgumentParser(
description='generate metering data',
)
@@ -116,6 +122,7 @@ def main():
)
parser.add_argument(
'--end',
type=int,
default=2,
help='Number of days to be stepped forward from now or date in the '
'future ("YYYY-MM-DDTHH:MM:SS" format) to define timestamps end '
@@ -125,6 +132,7 @@
'--type',
choices=('gauge', 'cumulative'),
default='gauge',
dest='meter_type',
help='Counter type.',
)
parser.add_argument(
@@ -134,10 +142,12 @@
)
parser.add_argument(
'--project',
dest='project_id',
help='Project id of owner.',
)
parser.add_argument(
'--user',
dest='user_id',
help='User id of owner.',
)
parser.add_argument(
@@ -153,20 +163,30 @@
default=0,
)
parser.add_argument(
'resource',
'--resource',
dest='resource_id',
default=str(uuid.uuid4()),
help='The resource id for the meter data.',
)
parser.add_argument(
'counter',
'--counter',
default='instance',
dest='name',
help='The counter name for the meter data.',
)
parser.add_argument(
'volume',
'--volume',
help='The amount to attach to the meter.',
type=int,
default=1,
)
args = parser.parse_args()
return parser
def main():
cfg.CONF([], project='ceilometer')
args = get_parser().parse_args()
# Set up logging to use the console
console = logging.StreamHandler(sys.stderr)
@@ -181,11 +201,11 @@ def main():
conn = storage.get_connection_from_config(cfg.CONF)
# Find the user and/or project for a real resource
if not (args.user or args.project):
if not (args.user_id or args.project_id):
for r in conn.get_resources():
if r.resource_id == args.resource:
args.user = r.user_id
args.project = r.project_id
if r.resource_id == args.resource_id:
args.user_id = r.user_id
args.project_id = r.project_id
break
# Compute the correct time span
@@ -208,24 +228,12 @@
end = datetime.datetime.strptime(args.end, format)
except ValueError:
raise
make_test_data(conn=conn,
name=args.counter,
meter_type=args.type,
unit=args.unit,
volume=args.volume,
random_min=args.random_min,
random_max=args.random_max,
user_id=args.user,
project_id=args.project,
resource_id=args.resource,
start=start,
end=end,
interval=args.interval,
resource_metadata={},
source='artificial',)
args.start = start
args.end = end
record_test_data(conn=conn, **args.__dict__)
return 0
if __name__ == '__main__':
main()
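With this change make_test_data() no longer writes to the storage backend itself: it yields the metering messages it builds, and the new record_test_data() helper (used by the refactored main()) is what actually stores them. A minimal sketch of reusing the generator directly, assuming it runs from the tools/ directory so make_test_data is importable; the meter name, ids and interval below are illustrative values, not part of the commit:

import datetime

from oslo.config import cfg

from ceilometer import storage

import make_test_data

cfg.CONF([], project='ceilometer')
conn = storage.get_connection_from_config(cfg.CONF)

end = datetime.datetime.utcnow()
start = end - datetime.timedelta(days=1)

# make_test_data() is now a generator of metering messages; recording
# them is left to the caller (this mirrors record_test_data() above).
for message in make_test_data.make_test_data(
        name='cpu_util', meter_type='gauge', unit='%', volume=20,
        random_min=0, random_max=0,
        user_id='1', project_id='1', resource_id='1',
        start=start, end=end, interval=10):
    conn.record_metering_data(message)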

tools/send_test_data.py (new file, 130 lines)

@@ -0,0 +1,130 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Command line tool for sending test data for Ceilometer via oslo.messaging.
Usage:
Send messages with samples generated by make_test_data
source .tox/py27/bin/activate
./tools/send_test_data.py --samples-count 1000 --resources-count 10 --topic metering
"""
import argparse
import datetime
import json
import random
import uuid
import make_test_data
from oslo_context import context
from ceilometer import messaging
from ceilometer import service
def send_batch(rpc_client, topic, batch):
rpc_client.prepare(topic=topic).cast(context.RequestContext(),
'record_metering_data', data=batch)
def get_rpc_client(config_file):
service.prepare_service(argv=['/', '--config-file', config_file])
transport = messaging.get_transport()
rpc_client = messaging.get_rpc_client(transport, version='1.0')
return rpc_client
def generate_data(rpc_client, make_data_args, samples_count,
batch_size, resources_count, topic):
make_data_args.interval = 1
make_data_args.start = (datetime.datetime.utcnow() -
datetime.timedelta(minutes=samples_count))
make_data_args.end = datetime.datetime.utcnow()
make_data_args.resource_id = None
resources_list = [str(uuid.uuid4())
for _ in xrange(resources_count)]
resource_samples = {resource: 0 for resource in resources_list}
batch = []
count = 0
for sample in make_test_data.make_test_data(**make_data_args.__dict__):
count += 1
resource = resources_list[random.randint(0, len(resources_list) - 1)]
resource_samples[resource] += 1
sample['resource_id'] = resource
batch.append(sample)
if len(batch) == batch_size:
send_batch(rpc_client, topic, batch)
batch = []
if count == samples_count:
send_batch(rpc_client, topic, batch)
return resource_samples
send_batch(rpc_client, topic, batch)
return resource_samples
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument(
'--batch-size',
dest='batch_size',
type=int,
default=100
)
parser.add_argument(
'--config-file',
default='/etc/ceilometer/ceilometer.conf'
)
parser.add_argument(
'--topic',
default='perfmetering'
)
parser.add_argument(
'--samples-count',
dest='samples_count',
type=int,
default=1000
)
parser.add_argument(
'--resources-count',
dest='resources_count',
type=int,
default=100
)
parser.add_argument(
'--result-directory',
dest='result_dir',
default='/tmp'
)
return parser
def main():
args = get_parser().parse_known_args()[0]
make_data_args = make_test_data.get_parser().parse_known_args()[0]
rpc_client = get_rpc_client(args.config_file)
result_dir = args.result_dir
del args.config_file
del args.result_dir
resource_writes = generate_data(rpc_client, make_data_args,
**args.__dict__)
result_file = "%s/sample-by-resource-%s" % (result_dir,
random.getrandbits(32))
with open(result_file, 'w') as f:
f.write(json.dumps(resource_writes))
return result_file
if __name__ == '__main__':
main()
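send_test_data.py builds on the generator above: main() takes its own options with parse_known_args(), lets make_test_data.get_parser() pick up any remaining flags, and generate_data() then casts the messages to the collector in batches over oslo.messaging RPC while counting how many samples went to each fake resource. A rough sketch of driving it programmatically, assuming the snippet runs next to the two tools so both modules are importable; the config path, topic and counts are illustrative:

import make_test_data
import send_test_data

# Start from make_test_data's defaults; generate_data() overrides
# start/end/interval and assigns resource ids itself.
make_data_args = make_test_data.get_parser().parse_known_args([])[0]

# Set up oslo.messaging from a ceilometer config file, then send 1000
# samples spread over 10 fake resources, 100 messages per RPC cast.
rpc_client = send_test_data.get_rpc_client('/etc/ceilometer/ceilometer.conf')
resource_samples = send_test_data.generate_data(
    rpc_client, make_data_args,
    samples_count=1000, batch_size=100,
    resources_count=10, topic='metering')

# generate_data() returns a per-resource count of the samples sent.
print(resource_samples)

From the command line the equivalent run is ./tools/send_test_data.py --samples-count 1000 --batch-size 100 --resources-count 10 --topic metering, optionally followed by any make_test_data flags such as --counter cpu_util --volume 20.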