Initial commit
Change-Id: Id4181336c0de78c50ec944073314d8bf74bc6d5a
parent 96e6d1af9a
commit 26719e3c48
1 .dockerignore Symbolic link
@@ -0,0 +1 @@
.gitignore
8 .gitignore vendored Normal file
@@ -0,0 +1,8 @@
.tox
__pycache__
*.pyc
.pytest_cache
.eggs
.stestr
*.egg-info
.coverage
33 .zuul.yaml Normal file
@@ -0,0 +1,33 @@
- job:
    name: atmosphere:image:build
    parent: vexxhost-build-docker-image
    provides: atmosphere:images
    vars: &atmosphere_images
      docker_images:
        - context: .
          repository: vexxhost/atmosphere-ingress
          target: atmosphere-ingress

- job:
    name: atmosphere:image:upload
    parent: vexxhost-upload-docker-image
    provides: atmosphere:images
    vars: *atmosphere_images

- job:
    name: atmosphere:image:promote
    parent: vexxhost-promote-docker-image
    vars: *atmosphere_images

- project:
    check:
      jobs:
        - tox-py37
        - atmosphere:image:build
    gate:
      jobs:
        - tox-py37
        - atmosphere:image:upload
    promote:
      jobs:
        - atmosphere:image:promote
29 Dockerfile Normal file
@@ -0,0 +1,29 @@
# Copyright (c) 2020 VEXXHOST, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

FROM docker.io/opendevorg/python-builder as builder
COPY . /tmp/src
RUN assemble

FROM docker.io/opendevorg/uwsgi-base AS atmosphere
COPY --from=builder /output/ /output
RUN rm -rfv /output/packages.txt && \
    /output/install-from-bindep
EXPOSE 8080
ENV FLASK_APP=atmosphere.app \
    UWSGI_HTTP_SOCKET=:8080

FROM atmosphere AS atmosphere-ingress
ENV UWSGI_WSGI_FILE=/usr/local/bin/atmosphere-ingress-wsgi
0 atmosphere/__init__.py Normal file
0 atmosphere/api/__init__.py Normal file
53 atmosphere/api/ingress.py Normal file
@@ -0,0 +1,53 @@
# Copyright 2020 VEXXHOST, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from flask import Blueprint
from flask import request
from flask import abort
from flask import jsonify
from dateutil.relativedelta import relativedelta

from atmosphere.app import create_app
from atmosphere import exceptions
from atmosphere import utils
from atmosphere import models

blueprint = Blueprint('ingress', __name__)


def init_application(config=None):
    app = create_app(config)
    app.register_blueprint(blueprint)
    return app


@blueprint.route('/v1/event', methods=['POST'])
def event():
    if request.json is None:
        abort(400)

    for event in request.json:
        print(jsonify(event).get_data(True))
        event = utils.normalize_event(event)

        try:
            resource = models.Resource.get_or_create(event)
        except (exceptions.EventTooOld, exceptions.IgnoredEvent):
            return '', 202

        # TODO(mnaser): Drop this logging eventually...
        print(jsonify(event).get_data(True))
        print(jsonify(resource.serialize).get_data(True))

    return '', 204
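A hypothetical client-side sketch (not part of the commit) of what a POST to the ingress endpoint looks like: the route accepts a JSON list of ceilometer-style events whose traits are [name, dtype, value] triples, as used in atmosphere/tests/unit/fake.py. The host/port below are assumptions taken from the Dockerfile's uWSGI configuration.

import json
import urllib.request

event = {
    'generated': '2020-06-07T01:42:54.736337',
    'event_type': 'compute.instance.exists',
    'traits': [
        ['service', 1, 'compute.devstack'],
        ['created_at', 4, '2020-06-07T01:42:52'],
        ['resource_id', 1, 'fake-uuid'],
        ['project_id', 1, 'fake-project'],
        ['instance_type', 1, 'v1-standard-1'],
        ['state', 1, 'ACTIVE'],
    ],
}

request_obj = urllib.request.Request(
    'http://localhost:8080/v1/event',           # assumed host/port
    data=json.dumps([event]).encode('utf-8'),   # the endpoint expects a list
    headers={'Content-Type': 'application/json'},
    method='POST',
)
with urllib.request.urlopen(request_obj) as response:
    print(response.status)   # 204 on success, 202 if the event was skipped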
43 atmosphere/app.py Normal file
@@ -0,0 +1,43 @@
# Copyright 2020 VEXXHOST, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

from flask import Flask

from atmosphere import models


def create_app(config=None):
    app = Flask(__name__)

    if config is not None:
        app.config.from_object(config)

    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    if app.config.get('SQLALCHEMY_DATABASE_URI') is None:
        app.config['SQLALCHEMY_DATABASE_URI'] = \
            os.environ.get('DATABASE_URI', 'sqlite:///:memory:')
    if app.config['DEBUG']:
        app.config['SQLALCHEMY_ECHO'] = True

    models.db.init_app(app)

    package_dir = os.path.abspath(os.path.dirname(__file__))
    migrations_path = os.path.join(package_dir, 'migrations')
    models.migrate.init_app(app, models.db, directory=migrations_path)

    return app
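A small usage sketch (not part of the commit): create_app() falls back to the DATABASE_URI environment variable when the config object does not set SQLALCHEMY_DATABASE_URI, and DEBUG=True turns on SQLALCHEMY_ECHO, as exercised by test_app.py. DevConfig is a hypothetical config object.

from atmosphere.app import create_app


class DevConfig:            # hypothetical config object
    DEBUG = True


app = create_app(DevConfig)
assert app.config['SQLALCHEMY_ECHO'] is True
# With no DATABASE_URI in the environment, the database defaults to
# sqlite:///:memory:.
print(app.config['SQLALCHEMY_DATABASE_URI'])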
31 atmosphere/exceptions.py Normal file
@@ -0,0 +1,31 @@
# Copyright 2020 VEXXHOST, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from werkzeug import exceptions


class UnsupportedEventType(exceptions.BadRequest):
    description = 'Unsupported event type'


class MultipleOpenPeriods(exceptions.Conflict):
    description = 'Multiple open periods'


class IgnoredEvent(Exception):
    description = 'Ignored event type'


class EventTooOld(Exception):
    pass
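Illustrative only (not part of the commit): the werkzeug-derived exceptions carry HTTP status codes that Flask turns into error responses (the unit tests assert 400 and 409), while IgnoredEvent and EventTooOld are plain exceptions that the ingress API catches and answers with 202.

from werkzeug.exceptions import HTTPException

from atmosphere import exceptions

assert exceptions.UnsupportedEventType.code == 400   # werkzeug BadRequest
assert exceptions.MultipleOpenPeriods.code == 409    # werkzeug Conflict
assert not issubclass(exceptions.IgnoredEvent, HTTPException)
assert not issubclass(exceptions.EventTooOld, HTTPException)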
1 atmosphere/migrations/README Normal file
@@ -0,0 +1 @@
Generic single-database configuration.
45 atmosphere/migrations/alembic.ini Normal file
@@ -0,0 +1,45 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
96 atmosphere/migrations/env.py Normal file
@@ -0,0 +1,96 @@
|
||||
from __future__ import with_statement
|
||||
|
||||
import logging
|
||||
from logging.config import fileConfig
|
||||
|
||||
from sqlalchemy import engine_from_config
|
||||
from sqlalchemy import pool
|
||||
|
||||
from alembic import context
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
fileConfig(config.config_file_name)
|
||||
logger = logging.getLogger('alembic.env')
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
# from myapp import mymodel
|
||||
# target_metadata = mymodel.Base.metadata
|
||||
from flask import current_app
|
||||
config.set_main_option(
|
||||
'sqlalchemy.url',
|
||||
str(current_app.extensions['migrate'].db.engine.url).replace('%', '%%'))
|
||||
target_metadata = current_app.extensions['migrate'].db.metadata
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def run_migrations_offline():
|
||||
"""Run migrations in 'offline' mode.
|
||||
|
||||
This configures the context with just a URL
|
||||
and not an Engine, though an Engine is acceptable
|
||||
here as well. By skipping the Engine creation
|
||||
we don't even need a DBAPI to be available.
|
||||
|
||||
Calls to context.execute() here emit the given string to the
|
||||
script output.
|
||||
|
||||
"""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url, target_metadata=target_metadata, literal_binds=True
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online():
|
||||
"""Run migrations in 'online' mode.
|
||||
|
||||
In this scenario we need to create an Engine
|
||||
and associate a connection with the context.
|
||||
|
||||
"""
|
||||
|
||||
# this callback is used to prevent an auto-migration from being generated
|
||||
# when there are no changes to the schema
|
||||
# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
|
||||
def process_revision_directives(context, revision, directives):
|
||||
if getattr(config.cmd_opts, 'autogenerate', False):
|
||||
script = directives[0]
|
||||
if script.upgrade_ops.is_empty():
|
||||
directives[:] = []
|
||||
logger.info('No changes in schema detected.')
|
||||
|
||||
connectable = engine_from_config(
|
||||
config.get_section(config.config_ini_section),
|
||||
prefix='sqlalchemy.',
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
with connectable.connect() as connection:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
process_revision_directives=process_revision_directives,
|
||||
**current_app.extensions['migrate'].configure_args
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
24 atmosphere/migrations/script.py.mako Normal file
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,30 @@
"""Added indexes for period times.

Revision ID: 90ae5785df01
Revises: d0f896c02017
Create Date: 2020-06-14 20:03:28.965285

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '90ae5785df01'
down_revision = 'd0f896c02017'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_index(op.f('ix_period_ended_at'), 'period', ['ended_at'], unique=False)
    op.create_index(op.f('ix_period_started_at'), 'period', ['started_at'], unique=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_period_started_at'), table_name='period')
    op.drop_index(op.f('ix_period_ended_at'), table_name='period')
    # ### end Alembic commands ###
@@ -0,0 +1,61 @@
|
||||
"""Initial migration.
|
||||
|
||||
Revision ID: d0f896c02017
|
||||
Revises:
|
||||
Create Date: 2020-06-14 14:50:26.172441
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
from atmosphere.models import BigIntegerDateTime
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'd0f896c02017'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('resource',
|
||||
sa.Column('uuid', sa.String(length=36), nullable=False),
|
||||
sa.Column('type', sa.String(length=32), nullable=False),
|
||||
sa.Column('project', sa.String(length=32), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('uuid')
|
||||
)
|
||||
op.create_table('spec',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('type', sa.String(length=32), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_table('instance_spec',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('instance_type', sa.String(length=255), nullable=True),
|
||||
sa.Column('state', sa.String(length=255), nullable=True),
|
||||
sa.ForeignKeyConstraint(['id'], ['spec.id'], ),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('instance_type', 'state')
|
||||
)
|
||||
op.create_table('period',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('resource_uuid', sa.String(length=36), nullable=False),
|
||||
sa.Column('started_at', BigIntegerDateTime(), nullable=False),
|
||||
sa.Column('ended_at', BigIntegerDateTime(), nullable=True),
|
||||
sa.Column('spec_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['resource_uuid'], ['resource.uuid'], ),
|
||||
sa.ForeignKeyConstraint(['spec_id'], ['spec.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_table('period')
|
||||
op.drop_table('instance_spec')
|
||||
op.drop_table('spec')
|
||||
op.drop_table('resource')
|
||||
# ### end Alembic commands ###
|
265 atmosphere/models.py Normal file
@@ -0,0 +1,265 @@
|
||||
# Copyright 2020 VEXXHOST, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from flask_sqlalchemy import SQLAlchemy
|
||||
from flask_migrate import Migrate
|
||||
from sqlalchemy import func
|
||||
from sqlalchemy import exc
|
||||
from sqlalchemy.orm import exc as orm_exc
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from sqlalchemy.types import TypeDecorator
|
||||
|
||||
from atmosphere import exceptions
|
||||
from atmosphere import utils
|
||||
|
||||
db = SQLAlchemy()
|
||||
migrate = Migrate()
|
||||
|
||||
|
||||
MONTH_START = relativedelta(day=1, hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
|
||||
class GetOrCreateMixin:
|
||||
@classmethod
|
||||
def get_or_create(self, event):
|
||||
query = self.query_from_event(event)
|
||||
new_instance = self.from_event(event)
|
||||
|
||||
db_instance = query.first()
|
||||
if db_instance is None:
|
||||
db_instance = new_instance
|
||||
|
||||
db.session.begin(nested=True)
|
||||
try:
|
||||
db.session.add(db_instance)
|
||||
db.session.commit()
|
||||
except (exc.IntegrityError, orm_exc.FlushError):
|
||||
db.session.rollback()
|
||||
db_instance = query.one()
|
||||
|
||||
return db_instance
|
||||
|
||||
|
||||
class Resource(db.Model, GetOrCreateMixin):
|
||||
uuid = db.Column(db.String(36), primary_key=True)
|
||||
type = db.Column(db.String(32), nullable=False)
|
||||
project = db.Column(db.String(32), nullable=False)
|
||||
updated_at = db.Column(db.DateTime, nullable=False)
|
||||
|
||||
periods = db.relationship('Period', backref='resource', lazy='joined')
|
||||
|
||||
__mapper_args__ = {
|
||||
'polymorphic_on': type
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_event(self, event):
|
||||
cls, _ = utils.get_model_type_from_event(event['event_type'])
|
||||
|
||||
return cls(
|
||||
uuid=event['traits']['resource_id'],
|
||||
project=event['traits']['project_id'],
|
||||
updated_at=event['generated'],
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def query_from_event(self, event):
|
||||
cls, _ = utils.get_model_type_from_event(event['event_type'])
|
||||
|
||||
return cls.query.filter_by(
|
||||
uuid=event['traits']['resource_id'],
|
||||
project=event['traits']['project_id'],
|
||||
).with_for_update()
|
||||
|
||||
@classmethod
|
||||
def get_or_create(self, event):
|
||||
resource = super(Resource, self).get_or_create(event)
|
||||
|
||||
# If the last update is newer than our last update, we assume that
|
||||
# another event has been processed that is newer (so we should ignore
|
||||
# this one).
|
||||
time = event['generated']
|
||||
if resource.updated_at is not None and resource.updated_at > time:
|
||||
raise exceptions.EventTooOld()
|
||||
|
||||
# Update the last updated_at time now so any older events get rejected
|
||||
db.session.commit()
|
||||
|
||||
# Check if we should ignore event
|
||||
if resource.__class__.is_event_ignored(event):
|
||||
raise exceptions.IgnoredEvent
|
||||
|
||||
# Retrieve spec for this event
|
||||
spec = Spec.get_or_create(event)
|
||||
|
||||
# No existing period, start our first period.
|
||||
if len(resource.periods) == 0:
|
||||
resource.periods.append(Period(
|
||||
started_at=event['traits']['created_at'],
|
||||
spec=spec
|
||||
))
|
||||
|
||||
# Grab the current open period to manipulate it
|
||||
period = resource.get_open_period()
|
||||
|
||||
# If we don't have an open period, there's nothing to do.
|
||||
if period is None:
|
||||
raise exceptions.EventTooOld()
|
||||
|
||||
# If we're deleted, then we close the current period.
|
||||
if 'deleted_at' in event['traits']:
|
||||
period.ended_at = event['traits']['deleted_at']
|
||||
elif period.spec != spec:
|
||||
period.ended_at = event['generated']
|
||||
|
||||
resource.periods.append(Period(
|
||||
started_at=event['generated'],
|
||||
spec=spec,
|
||||
))
|
||||
|
||||
# Bump updated_at to event time (in order to avoid conflicts)
|
||||
resource.updated_at = time
|
||||
db.session.commit()
|
||||
|
||||
return resource
|
||||
|
||||
def get_open_period(self):
|
||||
open_periods = list(filter(lambda p: p.ended_at is None, self.periods))
|
||||
if len(open_periods) > 1:
|
||||
raise exceptions.MultipleOpenPeriods
|
||||
if len(open_periods) == 0:
|
||||
return None
|
||||
return open_periods[0]
|
||||
|
||||
@property
|
||||
def serialize(self):
|
||||
"""Return object data in easily serializable format"""
|
||||
|
||||
return {
|
||||
'uuid': self.uuid,
|
||||
'type': self.type,
|
||||
'project': self.project,
|
||||
'updated_at': self.updated_at,
|
||||
'periods': [p.serialize for p in self.periods],
|
||||
}
|
||||
|
||||
|
||||
class Instance(Resource):
|
||||
__mapper_args__ = {
|
||||
'polymorphic_identity': 'OS::Nova::Server'
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def is_event_ignored(self, event):
|
||||
vm_state_is_deleted = (event['traits']['state'] == 'deleted')
|
||||
no_deleted_at = ('deleted_at' not in event['traits'])
|
||||
|
||||
if vm_state_is_deleted and no_deleted_at:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
class BigIntegerDateTime(TypeDecorator):
|
||||
impl = db.BigInteger
|
||||
|
||||
def process_bind_param(self, value, _):
|
||||
if value is None:
|
||||
return None
|
||||
assert isinstance(value, datetime)
|
||||
return value.timestamp() * 1000
|
||||
|
||||
def process_result_value(self, value, _):
|
||||
if value is None:
|
||||
return None
|
||||
return datetime.fromtimestamp(value / 1000)
|
||||
|
||||
|
||||
class Period(db.Model):
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
resource_uuid = db.Column(db.String(36), db.ForeignKey('resource.uuid'),
|
||||
nullable=False)
|
||||
started_at = db.Column(BigIntegerDateTime, nullable=False, index=True)
|
||||
ended_at = db.Column(BigIntegerDateTime, index=True)
|
||||
|
||||
spec_id = db.Column(db.Integer, db.ForeignKey('spec.id'), nullable=False)
|
||||
spec = db.relationship("Spec")
|
||||
|
||||
@property
|
||||
def seconds(self):
|
||||
ended_at = self.ended_at
|
||||
if ended_at is None:
|
||||
ended_at = datetime.now()
|
||||
return (ended_at - self.started_at).total_seconds()
|
||||
|
||||
@property
|
||||
def serialize(self):
|
||||
"""Return object data in easily serializable format"""
|
||||
|
||||
return {
|
||||
'started_at': self.started_at,
|
||||
'ended_at': self.ended_at,
|
||||
'seconds': self.seconds,
|
||||
'spec': self.spec.serialize,
|
||||
}
|
||||
|
||||
|
||||
class Spec(db.Model, GetOrCreateMixin):
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
type = db.Column(db.String(32))
|
||||
|
||||
__mapper_args__ = {
|
||||
'polymorphic_on': type
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_event(self, event):
|
||||
_, cls = utils.get_model_type_from_event(event['event_type'])
|
||||
spec = {c.name: event['traits'][c.name]
|
||||
for c in cls.__table__.columns if c.name != 'id'}
|
||||
|
||||
return cls(**spec)
|
||||
|
||||
@classmethod
|
||||
def query_from_event(self, event):
|
||||
_, cls = utils.get_model_type_from_event(event['event_type'])
|
||||
spec = {c.name: event['traits'][c.name]
|
||||
for c in cls.__table__.columns if c.name != 'id'}
|
||||
|
||||
return cls.query.filter_by(**spec)
|
||||
|
||||
|
||||
class InstanceSpec(Spec):
|
||||
id = db.Column(db.Integer, db.ForeignKey('spec.id'), primary_key=True)
|
||||
instance_type = db.Column(db.String(255))
|
||||
state = db.Column(db.String(255))
|
||||
|
||||
__table_args__ = (
|
||||
db.UniqueConstraint('instance_type', 'state'),
|
||||
)
|
||||
|
||||
__mapper_args__ = {
|
||||
'polymorphic_identity': 'OS::Nova::Server',
|
||||
}
|
||||
|
||||
@property
|
||||
def serialize(self):
|
||||
"""Return object data in easily serializable format"""
|
||||
|
||||
return {
|
||||
'instance_type': self.instance_type,
|
||||
'state': self.state,
|
||||
}
|
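A sketch of the period bookkeeping (not part of the commit), mirroring the unit tests in test_models.py: the first event opens a Period for the resource, and a later event with a different spec closes it and opens a new one. It assumes the default in-memory SQLite database and that the tables have been created first.

from dateutil.relativedelta import relativedelta

from atmosphere.app import create_app
from atmosphere import models, utils
from atmosphere.tests.unit import fake

app = create_app()
with app.app_context():
    models.db.create_all()   # assumes the default sqlite:///:memory: database

    event = utils.normalize_event(fake.get_event())
    resource = models.Resource.get_or_create(event)
    assert len(resource.periods) == 1                 # first period opened

    # A changed instance_type ends the open period and starts a new one.
    event['traits']['instance_type'] = 'v1-standard-2'
    event['generated'] += relativedelta(hours=+1)
    resource = models.Resource.get_or_create(event)
    assert len(resource.periods) == 2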
0 atmosphere/tests/__init__.py Normal file
0 atmosphere/tests/unit/__init__.py Normal file
84 atmosphere/tests/unit/api/test_ingress.py Normal file
@@ -0,0 +1,84 @@
|
||||
# Copyright 2020 VEXXHOST, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
import pytest
|
||||
|
||||
from atmosphere.tests.unit import fake
|
||||
from atmosphere import models
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("client", "db_session")
|
||||
class TestEvent:
|
||||
def test_with_no_json_provided(self, client):
|
||||
response = client.post('/v1/event')
|
||||
|
||||
assert response.status_code == 400
|
||||
|
||||
def test_with_one_event_provided(self, client):
|
||||
event = fake.get_event()
|
||||
response = client.post('/v1/event', json=[event])
|
||||
|
||||
assert response.status_code == 204
|
||||
assert models.Resource.query.count() == 1
|
||||
assert models.Period.query.count() == 1
|
||||
assert models.Spec.query.count() == 1
|
||||
|
||||
def test_with_multiple_events_provided(self, client):
|
||||
event_1 = fake.get_event(resource_id='fake-resource-1')
|
||||
event_2 = fake.get_event(resource_id='fake-resource-2')
|
||||
|
||||
response = client.post('/v1/event', json=[event_1, event_2])
|
||||
|
||||
assert response.status_code == 204
|
||||
assert models.Resource.query.count() == 2
|
||||
assert models.Period.query.count() == 2
|
||||
assert models.Spec.query.count() == 1
|
||||
|
||||
def test_with_old_event_provided(self, client):
|
||||
event_new = fake.get_event()
|
||||
event_new['generated'] = '2020-06-07T01:42:54.736337'
|
||||
response = client.post('/v1/event', json=[event_new])
|
||||
|
||||
assert response.status_code == 204
|
||||
assert models.Resource.query.count() == 1
|
||||
assert models.Period.query.count() == 1
|
||||
assert models.Spec.query.count() == 1
|
||||
|
||||
event_old = fake.get_event()
|
||||
event_old['generated'] = '2020-06-07T01:40:54.736337'
|
||||
response = client.post('/v1/event', json=[event_old])
|
||||
|
||||
assert response.status_code == 202
|
||||
assert models.Resource.query.count() == 1
|
||||
assert models.Period.query.count() == 1
|
||||
assert models.Spec.query.count() == 1
|
||||
|
||||
def test_with_invalid_event_provided(self, client):
|
||||
event = fake.get_event(event_type='foo.bar.exists')
|
||||
response = client.post('/v1/event', json=[event])
|
||||
|
||||
assert response.status_code == 400
|
||||
assert models.Resource.query.count() == 0
|
||||
assert models.Period.query.count() == 0
|
||||
assert models.Spec.query.count() == 0
|
||||
|
||||
def test_with_ignored_event_provided(self, client, ignored_event):
|
||||
event = fake.get_event(event_type=ignored_event)
|
||||
response = client.post('/v1/event', json=[event])
|
||||
|
||||
assert response.status_code == 202
|
||||
assert models.Resource.query.count() == 0
|
||||
assert models.Period.query.count() == 0
|
||||
assert models.Spec.query.count() == 0
|
53 atmosphere/tests/unit/conftest.py Normal file
@@ -0,0 +1,53 @@
# Copyright 2020 VEXXHOST, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest

from flask_sqlalchemy import SQLAlchemy

from atmosphere.app import create_app
from atmosphere.api import ingress
from atmosphere.models import db


@pytest.fixture(params=[
    'aggregate.cache_images.progress',
    'compute_task.build_instances.error',
    'compute.exception',
    'flavor.create',
    'keypair.create.end',
    'libvirt.connect.error',
    'metrics.update',
    'scheduler.select_destinations.end',
    'server_group.add_member',
    'service.create',
    'volume.usage',
])
def ignored_event(request):
    yield request.param


@pytest.fixture
def app():
    app = create_app()
    app.config['TESTING'] = True
    app.register_blueprint(ingress.blueprint)
    return app


@pytest.fixture
def _db(app):
    db.init_app(app)
    db.create_all()
    return db
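Illustrative test sketch (not part of the commit) showing how these fixtures are consumed: `client` is provided by pytest-flask from the `app` fixture above, and `db_session` by pytest-flask-sqlalchemy from `_db`, with mocked-sessions configured in setup.cfg so each test runs in a rolled-back transaction. This mirrors the tests in test_ingress.py.

import pytest

from atmosphere import models
from atmosphere.tests.unit import fake


@pytest.mark.usefixtures("client", "db_session")
class TestExample:
    def test_event_creates_resource(self, client):
        response = client.post('/v1/event', json=[fake.get_event()])

        assert response.status_code == 204
        assert models.Resource.query.count() == 1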
68 atmosphere/tests/unit/fake.py Normal file
@@ -0,0 +1,68 @@
|
||||
# Copyright 2020 VEXXHOST, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import datetime
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from atmosphere import models
|
||||
from atmosphere import utils
|
||||
|
||||
|
||||
def get_event(resource_id='fake-uuid', event_type='compute.instance.exists'):
|
||||
return dict({
|
||||
'generated': '2020-06-07T01:42:54.736337',
|
||||
'event_type': event_type,
|
||||
'traits': [
|
||||
["service", 1, "compute.devstack"],
|
||||
["request_id", 1, "req-cc707e71-8ea7-4646-afb6-65a8d1023c1a"],
|
||||
["created_at", 4, "2020-06-07T01:42:52"],
|
||||
["resource_id", 1, resource_id],
|
||||
["project_id", 1, "fake-project"],
|
||||
["instance_type", 1, "v1-standard-1"],
|
||||
["state", 1, "ACTIVE"],
|
||||
]
|
||||
})
|
||||
|
||||
|
||||
def get_normalized_event():
|
||||
event = get_event()
|
||||
return utils.normalize_event(event)
|
||||
|
||||
|
||||
def get_resource(type='OS::Nova::Server'):
|
||||
return models.Resource(uuid='fake-uuid', type=type,
|
||||
project='fake-project',
|
||||
updated_at=datetime.datetime.now())
|
||||
|
||||
|
||||
def get_instance_spec(**kwargs):
|
||||
if not kwargs:
|
||||
kwargs = {'instance_type': 'v2-standard-1', 'state': 'ACTIVE'}
|
||||
return models.InstanceSpec(**kwargs)
|
||||
|
||||
|
||||
def get_resource_with_periods(number):
|
||||
resource = get_resource()
|
||||
|
||||
spec = get_instance_spec()
|
||||
models.db.session.add(spec)
|
||||
|
||||
for i in range(number):
|
||||
period = models.Period(spec=spec)
|
||||
period.started_at = datetime.datetime.now() + relativedelta(hour=+i)
|
||||
period.ended_at = period.started_at + relativedelta(hour=+1)
|
||||
resource.periods.append(period)
|
||||
|
||||
return resource
|
30 atmosphere/tests/unit/test_app.py Normal file
@@ -0,0 +1,30 @@
|
||||
# Copyright 2020 VEXXHOST, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from atmosphere import app
|
||||
|
||||
|
||||
class TestApp:
|
||||
def test_sqlalchemy_database_uri_from_env(self, monkeypatch):
|
||||
monkeypatch.setenv("DATABASE_URI", "foobar")
|
||||
|
||||
test_app = app.create_app()
|
||||
assert test_app.config['SQLALCHEMY_DATABASE_URI'] == 'foobar'
|
||||
|
||||
def test_debug_enables_sqlalchemy_echo(self):
|
||||
class FakeConfig:
|
||||
DEBUG = True
|
||||
|
||||
test_app = app.create_app(FakeConfig)
|
||||
assert test_app.config['SQLALCHEMY_ECHO'] == True
|
412 atmosphere/tests/unit/test_models.py Normal file
@@ -0,0 +1,412 @@
|
||||
# Copyright 2020 VEXXHOST, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import datetime
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
from sqlalchemy import exc
|
||||
from sqlalchemy import func
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from freezegun import freeze_time
|
||||
import before_after
|
||||
|
||||
from atmosphere import models
|
||||
from atmosphere import exceptions
|
||||
from atmosphere.tests.unit import fake
|
||||
|
||||
|
||||
class GetOrCreateTestMixin:
|
||||
def test_with_existing_object(self):
|
||||
event = fake.get_normalized_event()
|
||||
assert self.MODEL.query_from_event(event).count() == 0
|
||||
|
||||
old_object = self.MODEL.get_or_create(event)
|
||||
assert self.MODEL.query_from_event(event).count() == 1
|
||||
|
||||
new_object = self.MODEL.get_or_create(event)
|
||||
assert self.MODEL.query_from_event(event).count() == 1
|
||||
|
||||
assert old_object == new_object
|
||||
|
||||
def test_with_no_existing_object(self):
|
||||
event = fake.get_normalized_event()
|
||||
assert self.MODEL.query_from_event(event).count() == 0
|
||||
|
||||
new_object = self.MODEL.get_or_create(event)
|
||||
assert self.MODEL.query_from_event(event).count() == 1
|
||||
|
||||
def test_with_object_created_during_creation(self):
|
||||
event = fake.get_normalized_event()
|
||||
assert self.MODEL.query_from_event(event).count() == 0
|
||||
|
||||
def before_session_begin(*args, **kwargs):
|
||||
self.MODEL.get_or_create(event)
|
||||
with before_after.before('atmosphere.models.db.session.begin',
|
||||
before_session_begin):
|
||||
self.MODEL.get_or_create(event)
|
||||
|
||||
assert self.MODEL.query_from_event(event).count() == 1
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db_session")
|
||||
class TestResource(GetOrCreateTestMixin):
|
||||
MODEL = models.Resource
|
||||
|
||||
def test_from_event(self):
|
||||
event = fake.get_normalized_event()
|
||||
resource = models.Resource.from_event(event)
|
||||
|
||||
assert resource.uuid == event['traits']['resource_id']
|
||||
assert resource.project == event['traits']['project_id']
|
||||
assert resource.updated_at == event['generated']
|
||||
|
||||
@mock.patch('flask_sqlalchemy._QueryProperty.__get__')
|
||||
def test_query_from_event(self, mock_query_property_getter):
|
||||
mock_filter_by = mock_query_property_getter.return_value.filter_by
|
||||
|
||||
event = fake.get_normalized_event()
|
||||
query = models.Resource.query_from_event(event)
|
||||
|
||||
mock_filter_by.assert_called_with(
|
||||
project='fake-project',
|
||||
uuid='fake-uuid',
|
||||
)
|
||||
|
||||
def test_get_or_create_with_old_event(self):
|
||||
event = fake.get_normalized_event()
|
||||
new_object = models.Resource.get_or_create(event)
|
||||
|
||||
old_event = event.copy()
|
||||
old_event['generated'] = event['generated'] + \
|
||||
relativedelta(microseconds=-1)
|
||||
|
||||
with pytest.raises(exceptions.EventTooOld) as e:
|
||||
models.Resource.get_or_create(old_event)
|
||||
|
||||
def test_get_or_create_refresh_updated_at(self):
|
||||
event = fake.get_normalized_event()
|
||||
old_object = models.Resource.get_or_create(event)
|
||||
|
||||
new_event = event.copy()
|
||||
new_event['generated'] = event['generated'] + \
|
||||
relativedelta(microseconds=+1)
|
||||
|
||||
new_object = models.Resource.get_or_create(new_event)
|
||||
|
||||
assert new_object.updated_at == new_event['generated']
|
||||
assert models.Resource.query_from_event(event).count() == 1
|
||||
|
||||
def test_get_or_create_using_created_at(self):
|
||||
event = fake.get_normalized_event()
|
||||
resource = models.Resource.get_or_create(event)
|
||||
|
||||
assert resource.get_open_period().started_at == \
|
||||
event['traits']['created_at']
|
||||
|
||||
def test_get_or_create_using_deleted_event_only(self):
|
||||
event = fake.get_normalized_event()
|
||||
event['traits']['deleted_at'] = event['traits']['created_at'] + \
|
||||
relativedelta(hours=+1)
|
||||
|
||||
resource = models.Resource.get_or_create(event)
|
||||
|
||||
assert resource.get_open_period() is None
|
||||
assert len(resource.periods) == 1
|
||||
assert resource.periods[0].ended_at == event['traits']['deleted_at']
|
||||
assert resource.periods[0].seconds == 3600
|
||||
|
||||
def test_get_or_create_using_multiple_deleted_events(self):
|
||||
event = fake.get_normalized_event()
|
||||
event['traits']['deleted_at'] = event['traits']['created_at'] + \
|
||||
relativedelta(hours=+1)
|
||||
|
||||
models.Resource.get_or_create(event)
|
||||
with pytest.raises(exceptions.EventTooOld) as e:
|
||||
models.Resource.get_or_create(event)
|
||||
|
||||
def test_get_or_create_using_deleted_event(self):
|
||||
event = fake.get_normalized_event()
|
||||
old_resource = models.Resource.get_or_create(event)
|
||||
|
||||
assert old_resource.get_open_period() is not None
|
||||
assert len(old_resource.periods) == 1
|
||||
|
||||
event['traits']['deleted_at'] = event['traits']['created_at'] + \
|
||||
relativedelta(hours=+1)
|
||||
new_resource = models.Resource.get_or_create(event)
|
||||
|
||||
assert old_resource == new_resource
|
||||
assert new_resource.get_open_period() is None
|
||||
assert len(new_resource.periods) == 1
|
||||
assert new_resource.periods[0].ended_at == \
|
||||
event['traits']['deleted_at']
|
||||
assert new_resource.periods[0].seconds == 3600
|
||||
|
||||
def test_get_or_create_using_updated_spec(self):
|
||||
event = fake.get_normalized_event()
|
||||
old_resource = models.Resource.get_or_create(event)
|
||||
|
||||
assert old_resource.get_open_period() is not None
|
||||
assert len(old_resource.periods) == 1
|
||||
|
||||
event['traits']['instance_type'] = 'v1-standard-2'
|
||||
event['generated'] += relativedelta(hours=+1)
|
||||
new_resource = models.Resource.get_or_create(event)
|
||||
|
||||
assert old_resource == new_resource
|
||||
assert new_resource.get_open_period() is not None
|
||||
assert len(new_resource.periods) == 2
|
||||
|
||||
assert new_resource.periods[0].ended_at == event['generated']
|
||||
assert new_resource.get_open_period().started_at == event['generated']
|
||||
|
||||
def test_get_or_create_using_same_spec(self):
|
||||
event = fake.get_normalized_event()
|
||||
old_resource = models.Resource.get_or_create(event)
|
||||
|
||||
assert old_resource.get_open_period() is not None
|
||||
assert len(old_resource.periods) == 1
|
||||
|
||||
event['generated'] += relativedelta(hours=+1)
|
||||
new_resource = models.Resource.get_or_create(event)
|
||||
|
||||
assert old_resource == new_resource
|
||||
assert old_resource.periods == new_resource.periods
|
||||
assert new_resource.get_open_period() is not None
|
||||
assert len(new_resource.periods) == 1
|
||||
|
||||
def test_serialize_with_no_periods(self):
|
||||
resource = fake.get_resource()
|
||||
|
||||
assert resource.serialize == {
|
||||
'uuid': resource.uuid,
|
||||
'type': resource.type,
|
||||
'project': resource.project,
|
||||
'updated_at': resource.updated_at,
|
||||
'periods': [],
|
||||
}
|
||||
|
||||
def test_serialize(self):
|
||||
resource = fake.get_resource_with_periods(20)
|
||||
|
||||
assert resource.serialize == {
|
||||
'uuid': resource.uuid,
|
||||
'type': resource.type,
|
||||
'project': resource.project,
|
||||
'updated_at': resource.updated_at,
|
||||
'periods': [p.serialize for p in resource.periods],
|
||||
}
|
||||
|
||||
def test_number_of_periods_with_no_periods(self):
|
||||
resource = fake.get_resource_with_periods(0)
|
||||
models.db.session.add(resource)
|
||||
models.db.session.commit()
|
||||
|
||||
assert len(resource.periods) == 0
|
||||
|
||||
def test_number_of_periods_with_periods(self):
|
||||
resource = fake.get_resource_with_periods(20)
|
||||
models.db.session.add(resource)
|
||||
models.db.session.commit()
|
||||
|
||||
assert len(resource.periods) == 20
|
||||
|
||||
def test_get_open_period_with_no_open(self):
|
||||
resource = fake.get_resource_with_periods(20)
|
||||
models.db.session.add(resource)
|
||||
models.db.session.commit()
|
||||
|
||||
assert resource.get_open_period() is None
|
||||
|
||||
def test_get_open_period_with_only_one_open_period(self):
|
||||
resource = fake.get_resource()
|
||||
spec = fake.get_instance_spec()
|
||||
|
||||
period = models.Period(spec=spec)
|
||||
period.started_at = datetime.datetime.now()
|
||||
resource.periods.append(period)
|
||||
|
||||
models.db.session.add(resource)
|
||||
models.db.session.commit()
|
||||
|
||||
assert len(resource.periods) == 1
|
||||
assert resource.get_open_period() == period
|
||||
|
||||
def test_get_open_period_with_multiple_open_periods(self):
|
||||
resource = fake.get_resource()
|
||||
spec = fake.get_instance_spec()
|
||||
|
||||
for _ in range(2):
|
||||
period = models.Period(spec=spec)
|
||||
period.started_at = datetime.datetime.now()
|
||||
resource.periods.append(period)
|
||||
|
||||
models.db.session.add(resource)
|
||||
models.db.session.commit()
|
||||
|
||||
with pytest.raises(exceptions.MultipleOpenPeriods) as e:
|
||||
resource.get_open_period()
|
||||
|
||||
assert e.value.code == 409
|
||||
assert e.value.description == "Multiple open periods"
|
||||
|
||||
def test_get_open_period_with_multiple_periods(self):
|
||||
resource = fake.get_resource_with_periods(20)
|
||||
|
||||
period = models.Period(spec=resource.periods[-1].spec)
|
||||
period.started_at = datetime.datetime.now()
|
||||
resource.periods.append(period)
|
||||
|
||||
models.db.session.add(resource)
|
||||
models.db.session.commit()
|
||||
|
||||
assert len(resource.periods) == 21
|
||||
assert resource.get_open_period() == period
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db_session")
|
||||
class TestInstance:
|
||||
def test_is_event_ignored(self):
|
||||
event = fake.get_normalized_event()
|
||||
assert models.Instance.is_event_ignored(event) == False
|
||||
|
||||
def test_is_event_ignored_for_pending_delete(self):
|
||||
event = fake.get_normalized_event()
|
||||
event['event_type'] = 'compute.instance.delete.start'
|
||||
event['traits']['state'] = 'deleted'
|
||||
assert models.Instance.is_event_ignored(event) == True
|
||||
|
||||
def test_is_event_ignored_for_deleted(self):
|
||||
event = fake.get_normalized_event()
|
||||
event['event_type'] = 'compute.instance.delete.start'
|
||||
event['traits']['state'] = 'deleted'
|
||||
event['traits']['deleted_at'] = event['generated']
|
||||
assert models.Instance.is_event_ignored(event) == False
|
||||
|
||||
def test_get_or_create_has_no_deleted_period(self):
|
||||
event = fake.get_normalized_event()
|
||||
resource = models.Resource.get_or_create(event)
|
||||
|
||||
assert resource.get_open_period() is not None
|
||||
assert len(resource.periods) == 1
|
||||
|
||||
event['event_type'] = 'compute.instance.delete.start'
|
||||
event['traits']['state'] = 'deleted'
|
||||
event['generated'] += relativedelta(hours=+1)
|
||||
|
||||
with pytest.raises(exceptions.IgnoredEvent) as e:
|
||||
models.Resource.get_or_create(event)
|
||||
|
||||
assert resource.get_open_period() is not None
|
||||
assert len(resource.periods) == 1
|
||||
|
||||
event['traits']['deleted_at'] = event['generated']
|
||||
event['generated'] += relativedelta(seconds=+2)
|
||||
resource = models.Resource.get_or_create(event)
|
||||
|
||||
assert resource.get_open_period() is None
|
||||
assert len(resource.periods) == 1
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db_session")
|
||||
class TestPeriod:
|
||||
def test_serialize_without_start(self):
|
||||
spec = fake.get_instance_spec()
|
||||
period = models.Period(spec=spec)
|
||||
|
||||
resource = fake.get_resource()
|
||||
resource.periods.append(period)
|
||||
|
||||
models.db.session.add(resource)
|
||||
with pytest.raises(exc.IntegrityError):
|
||||
models.db.session.commit()
|
||||
|
||||
def test_serialize_without_ending(self):
|
||||
now = datetime.datetime.now()
|
||||
started_at = now + relativedelta(hours=-1)
|
||||
|
||||
spec = fake.get_instance_spec()
|
||||
period = models.Period(
|
||||
started_at=started_at,
|
||||
spec=spec
|
||||
)
|
||||
|
||||
resource = fake.get_resource()
|
||||
resource.periods.append(period)
|
||||
|
||||
with freeze_time(now):
|
||||
assert datetime.datetime.now() == now
|
||||
assert period.serialize == {
|
||||
'started_at': started_at,
|
||||
'ended_at': None,
|
||||
'seconds': 3600,
|
||||
'spec': spec.serialize
|
||||
}
|
||||
|
||||
def test_serialize(self):
|
||||
started_at = datetime.datetime.now()
|
||||
ended_at = started_at + relativedelta(hours=+1)
|
||||
|
||||
spec = fake.get_instance_spec()
|
||||
period = models.Period(
|
||||
started_at=started_at,
|
||||
ended_at=ended_at,
|
||||
spec=spec
|
||||
)
|
||||
|
||||
resource = fake.get_resource()
|
||||
resource.periods.append(period)
|
||||
|
||||
assert period.serialize == {
|
||||
'started_at': started_at,
|
||||
'ended_at': ended_at,
|
||||
'seconds': 3600,
|
||||
'spec': spec.serialize
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db_session")
|
||||
class TestSpec(GetOrCreateTestMixin):
|
||||
MODEL = models.Spec
|
||||
|
||||
def test_from_event(self):
|
||||
event = fake.get_normalized_event()
|
||||
spec = models.Spec.from_event(event)
|
||||
|
||||
assert spec.instance_type == 'v1-standard-1'
|
||||
assert spec.state == 'ACTIVE'
|
||||
|
||||
@mock.patch('flask_sqlalchemy._QueryProperty.__get__')
|
||||
def test_query_from_event(self, mock_query_property_getter):
|
||||
mock_filter_by = mock_query_property_getter.return_value.filter_by
|
||||
|
||||
event = fake.get_normalized_event()
|
||||
query = models.Spec.query_from_event(event)
|
||||
|
||||
mock_filter_by.assert_called_with(
|
||||
instance_type='v1-standard-1',
|
||||
state='ACTIVE'
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db_session")
|
||||
class TestInstanceSpec:
|
||||
def test_serialize(self):
|
||||
spec = fake.get_instance_spec()
|
||||
|
||||
assert spec.serialize == {
|
||||
'instance_type': spec.instance_type,
|
||||
'state': spec.state,
|
||||
}
|
61 atmosphere/tests/unit/test_utils.py Normal file
@@ -0,0 +1,61 @@
|
||||
# Copyright 2020 VEXXHOST, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import datetime
|
||||
|
||||
import pytest
|
||||
|
||||
from atmosphere.tests.unit import fake
|
||||
from atmosphere import exceptions
|
||||
from atmosphere import models
|
||||
from atmosphere import utils
|
||||
|
||||
|
||||
class TestNormalizeEvent:
|
||||
def test_normalize_event(self):
|
||||
event = fake.get_event()
|
||||
event_expected = fake.get_event()
|
||||
event_expected.update({
|
||||
"generated": datetime.datetime(2020, 6, 7, 1, 42, 54, 736337),
|
||||
"traits": {
|
||||
"service": "compute.devstack",
|
||||
"request_id": "req-cc707e71-8ea7-4646-afb6-65a8d1023c1a",
|
||||
"created_at": datetime.datetime(2020, 6, 7, 1, 42, 52),
|
||||
"project_id": "fake-project",
|
||||
"resource_id": "fake-uuid",
|
||||
"instance_type": "v1-standard-1",
|
||||
"state": "ACTIVE",
|
||||
}
|
||||
})
|
||||
|
||||
assert utils.normalize_event(event) == event_expected
|
||||
|
||||
|
||||
class TestModelTypeDetection:
|
||||
def test_compute_instance(self):
|
||||
assert utils.get_model_type_from_event('compute.instance.exists') == \
|
||||
(models.Instance, models.InstanceSpec)
|
||||
|
||||
def test_ignored_resource(self, ignored_event):
|
||||
with pytest.raises(exceptions.IgnoredEvent) as e:
|
||||
utils.get_model_type_from_event(ignored_event)
|
||||
|
||||
assert e.value.description == "Ignored event type"
|
||||
|
||||
def test_unknown_resource(self):
|
||||
with pytest.raises(exceptions.UnsupportedEventType) as e:
|
||||
utils.get_model_type_from_event('foobar')
|
||||
|
||||
assert e.value.code == 400
|
||||
assert e.value.description == "Unsupported event type"
|
60 atmosphere/utils.py Normal file
@@ -0,0 +1,60 @@
# Copyright 2020 VEXXHOST, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ceilometer.event import models as ceilometer_models
from dateutil import parser

from atmosphere import exceptions
from atmosphere import models


def normalize_event(event):
    event['generated'] = parser.parse(event['generated'])
    event['traits'] = {
        k: ceilometer_models.Trait.convert_value(t, v)
        for (k, t, v) in event['traits']
    }

    return event


def get_model_type_from_event(event):
    if event.startswith('compute.instance'):
        return models.Instance, models.InstanceSpec
    if event.startswith('aggregate.'):
        raise exceptions.IgnoredEvent
    if event.startswith('compute_task.'):
        raise exceptions.IgnoredEvent
    if event.startswith('compute.'):
        raise exceptions.IgnoredEvent
    if event.startswith('flavor.'):
        raise exceptions.IgnoredEvent
    if event.startswith('keypair.'):
        raise exceptions.IgnoredEvent
    if event.startswith('libvirt.'):
        raise exceptions.IgnoredEvent
    if event.startswith('metrics.'):
        raise exceptions.IgnoredEvent
    if event.startswith('scheduler.'):
        raise exceptions.IgnoredEvent
    if event.startswith('server_group.'):
        raise exceptions.IgnoredEvent
    if event.startswith('service.'):
        raise exceptions.IgnoredEvent
    if event == 'volume.usage':
        raise exceptions.IgnoredEvent

    raise exceptions.UnsupportedEventType
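Behaviour sketch (not part of the commit), matching test_utils.py: normalize_event() parses the 'generated' timestamp and collapses the [name, dtype, value] trait triples into a dict, with ceilometer's Trait.convert_value coercing values (dtype 1 is text, dtype 4 is a datetime).

import datetime

from atmosphere import utils

event = {
    'generated': '2020-06-07T01:42:54.736337',
    'event_type': 'compute.instance.exists',
    'traits': [
        ['resource_id', 1, 'fake-uuid'],
        ['created_at', 4, '2020-06-07T01:42:52'],
    ],
}

event = utils.normalize_event(event)
assert event['generated'] == datetime.datetime(2020, 6, 7, 1, 42, 54, 736337)
assert event['traits']['created_at'] == datetime.datetime(2020, 6, 7, 1, 42, 52)
assert event['traits']['resource_id'] == 'fake-uuid'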
2 bindep.txt Normal file
@@ -0,0 +1,2 @@
gcc [compile]
libc-dev [compile]
6 requirements.txt Normal file
@@ -0,0 +1,6 @@
ceilometer
Flask
Flask-Migrate
Flask-SQLAlchemy
python-dateutil
PyMySQL
13 setup.cfg Normal file
@@ -0,0 +1,13 @@
[metadata]
name = atmosphere

[files]
packages =
    atmosphere

[entry_points]
wsgi_scripts =
    atmosphere-ingress-wsgi = atmosphere.api.ingress:init_application

[tool:pytest]
mocked-sessions=atmosphere.models.db.session
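A rough sketch (not part of the commit) of what the pbr-generated atmosphere-ingress-wsgi script amounts to: it exposes the WSGI application returned by atmosphere.api.ingress:init_application, which is the file the Dockerfile points uWSGI at via UWSGI_WSGI_FILE. The exact generated code is pbr's; treat this as an approximation.

from atmosphere.api.ingress import init_application

# uWSGI looks for a module-level `application` callable in the wsgi file.
application = init_application()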
19 setup.py Normal file
@@ -0,0 +1,19 @@
# Copyright 2020 VEXXHOST, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import setuptools

setuptools.setup(
    setup_requires=['pbr'],
    pbr=True)
8 test-requirements.txt Normal file
@@ -0,0 +1,8 @@
before_after
flake8
freezegun
pylint
pytest
pytest-cov
pytest-flask
pytest-flask-sqlalchemy
36 tox.ini Normal file
@@ -0,0 +1,36 @@
[tox]
skipsdist = True

[testenv]
envdir = {toxworkdir}/shared
usedevelop = True
setenv =
    FLASK_APP=atmosphere.app
passenv =
    DATABASE_URI
deps =
    -r{toxinidir}/test-requirements.txt
    -r{toxinidir}/requirements.txt
commands =
    pytest --cov-report term-missing \
        --cov-report term:skip-covered \
        --cov=atmosphere

[testenv:venv]
commands = {posargs}

[testenv:linters]
commands =
    pylint atmosphere
    flake8 atmosphere

[testenv:docs]
deps =
    -r{toxinidir}/doc/requirements.txt
commands =
    doc8 doc
    sphinx-build -W -b html -d doc/build/doctrees doc/source doc/build/html

[pytest]
filterwarnings =
    ignore::DeprecationWarning