Flask blueprints are used
* blueprints are used * syntax is fixed for flake8
This commit is contained in:
parent
85c11fe3e1
commit
fb8d179c98
|
@ -1,20 +1,76 @@
|
|||
import flask
|
||||
from flask import Flask
|
||||
from flask import jsonify
|
||||
from flask import make_response
|
||||
import flask_jsonschema
|
||||
import flask_sqlalchemy
|
||||
import os
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
|
||||
app = flask.Flask(__name__)
|
||||
app = Flask(__name__)
|
||||
|
||||
# Extensions
|
||||
|
||||
# Registering flask extensions
|
||||
app.config['JSONSCHEMA_DIR'] = os.path.join(app.root_path, 'schemas')
|
||||
flask_jsonschema.JsonSchema(app)
|
||||
|
||||
db = flask_sqlalchemy.SQLAlchemy(app, session_options={'autocommit': True})
|
||||
# app.config['SQLALCHEMY_ECHO'] = True
|
||||
|
||||
# Errors handling
|
||||
from collector.api import error_handling
|
||||
|
||||
# Resources handling
|
||||
from collector.api.resources import *
|
||||
# Registering blueprints
|
||||
from collector.api.resources.action_logs import bp as action_logs_bp
|
||||
from collector.api.resources.ping import bp as ping_bp
|
||||
|
||||
app.register_blueprint(action_logs_bp)
|
||||
app.register_blueprint(ping_bp)
|
||||
|
||||
|
||||
# Registering error handlers
|
||||
@app.errorhandler(400)
|
||||
def bad_request(error):
|
||||
app.logger.error("Bad request: {}".format(error))
|
||||
return make_response(jsonify({'status': 'error',
|
||||
'message': '{}'.format(error)}), 400)
|
||||
|
||||
|
||||
@app.errorhandler(IntegrityError)
|
||||
def integrity_error(error):
|
||||
app.logger.error("Bad request: {}".format(error))
|
||||
return make_response(jsonify({'status': 'error',
|
||||
'message': '{}'.format(error)}), 400)
|
||||
|
||||
|
||||
@app.errorhandler(404)
|
||||
def not_found(error):
|
||||
app.logger.error("Not found: {}".format(error))
|
||||
return make_response(jsonify({'status': 'error',
|
||||
'message': '{}'.format(error)}), 404)
|
||||
|
||||
|
||||
@app.errorhandler(405)
|
||||
def not_allowed(error):
|
||||
app.logger.error("Method not allowed: {}".format(error))
|
||||
return make_response(jsonify({'status': 'error',
|
||||
'message': '{}'.format(error)}), 405)
|
||||
|
||||
|
||||
@app.errorhandler(flask_jsonschema.ValidationError)
|
||||
def validation_error(error):
|
||||
app.logger.error("Validation error: {}".format(error))
|
||||
return make_response(jsonify({'status': 'error',
|
||||
'message': '{}'.format(error)}), 400)
|
||||
|
||||
|
||||
@app.errorhandler(500)
|
||||
def server_error(error):
|
||||
app.logger.error("Server error: {}".format(error))
|
||||
error_name = error.__class__.__name__
|
||||
return make_response(
|
||||
jsonify(
|
||||
{
|
||||
'status': 'error',
|
||||
'message': '{0}: {1}'.format(error_name, error)
|
||||
}
|
||||
),
|
||||
500
|
||||
)
|
||||
|
|
|
@ -13,5 +13,3 @@ ACTION_LOG_STATUSES = make_enum(
|
|||
'existed',
|
||||
'failed'
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -23,13 +23,21 @@ def handle_response(http_code, *path):
|
|||
@wraps(fn)
|
||||
def decorated(*args, **kwargs):
|
||||
response = fn(*args, **kwargs)
|
||||
current_app.logger.debug("Processing response: {}".format(response))
|
||||
current_app.logger.debug(
|
||||
"Processing response: {}".format(response)
|
||||
)
|
||||
if current_app.config.get('VALIDATE_RESPONSE', False) and path:
|
||||
current_app.logger.debug("Validating response: {}".format(response))
|
||||
current_app.logger.debug(
|
||||
"Validating response: {}".format(response)
|
||||
)
|
||||
jsonschema_ext = current_app.extensions.get('jsonschema')
|
||||
jsonschema.validate(response, jsonschema_ext.get_schema(path))
|
||||
current_app.logger.debug("Response validated: {}".format(response))
|
||||
current_app.logger.debug("Response processed: {}".format(response))
|
||||
current_app.logger.debug(
|
||||
"Response validated: {}".format(response)
|
||||
)
|
||||
current_app.logger.debug(
|
||||
"Response processed: {}".format(response)
|
||||
)
|
||||
return jsonify(response), http_code
|
||||
return decorated
|
||||
return wrapper
|
||||
|
@ -62,7 +70,7 @@ def db_transaction(fn):
|
|||
result = fn(*args, **kwargs)
|
||||
db.session.commit()
|
||||
return result
|
||||
except:
|
||||
except Exception:
|
||||
db.session.rollback()
|
||||
raise
|
||||
return decorated
|
||||
|
|
|
@ -11,7 +11,8 @@ class Production(object):
|
|||
LOG_LEVEL = logging.ERROR
|
||||
LOG_ROTATION = False
|
||||
LOGGER_NAME = 'collector'
|
||||
SQLALCHEMY_DATABASE_URI = 'postgresql://collector:*****@localhost/collector'
|
||||
SQLALCHEMY_DATABASE_URI = \
|
||||
'postgresql://collector:*****@localhost/collector'
|
||||
|
||||
|
||||
class Testing(Production):
|
||||
|
@ -24,4 +25,5 @@ class Testing(Production):
|
|||
LOG_ROTATION = True
|
||||
LOG_FILE_SIZE = 2048000
|
||||
LOG_FILES_COUNT = 5
|
||||
SQLALCHEMY_DATABASE_URI = 'postgresql://collector:collector@localhost/collector'
|
||||
SQLALCHEMY_DATABASE_URI = \
|
||||
'postgresql://collector:collector@localhost/collector'
|
||||
|
|
|
@ -16,7 +16,10 @@ fileConfig(config.config_file_name)
|
|||
# from myapp import mymodel
|
||||
# target_metadata = mymodel.Base.metadata
|
||||
from flask import current_app
|
||||
config.set_main_option('sqlalchemy.url', current_app.config.get('SQLALCHEMY_DATABASE_URI'))
|
||||
config.set_main_option(
|
||||
'sqlalchemy.url',
|
||||
current_app.config.get('SQLALCHEMY_DATABASE_URI')
|
||||
)
|
||||
target_metadata = current_app.extensions['migrate'].db.metadata
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
|
@ -24,6 +27,7 @@ target_metadata = current_app.extensions['migrate'].db.metadata
|
|||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def run_migrations_offline():
|
||||
"""Run migrations in 'offline' mode.
|
||||
|
||||
|
@ -42,6 +46,7 @@ def run_migrations_offline():
|
|||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online():
|
||||
"""Run migrations in 'online' mode.
|
||||
|
||||
|
@ -50,15 +55,13 @@ def run_migrations_online():
|
|||
|
||||
"""
|
||||
engine = engine_from_config(
|
||||
config.get_section(config.config_ini_section),
|
||||
prefix='sqlalchemy.',
|
||||
poolclass=pool.NullPool)
|
||||
config.get_section(config.config_ini_section),
|
||||
prefix='sqlalchemy.',
|
||||
poolclass=pool.NullPool
|
||||
)
|
||||
|
||||
connection = engine.connect()
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata
|
||||
)
|
||||
context.configure(connection=connection, target_metadata=target_metadata)
|
||||
|
||||
try:
|
||||
with context.begin_transaction():
|
||||
|
@ -70,4 +73,3 @@ if context.is_offline_mode():
|
|||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
||||
|
||||
|
|
|
@ -16,12 +16,13 @@ import sqlalchemy as sa
|
|||
|
||||
def upgrade():
|
||||
### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('action_logs',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('node_aid', sa.String(), nullable=False),
|
||||
sa.Column('external_id', sa.Integer(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('node_aid', 'external_id')
|
||||
op.create_table(
|
||||
'action_logs',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('node_aid', sa.String(), nullable=False),
|
||||
sa.Column('external_id', sa.Integer(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('node_aid', 'external_id')
|
||||
)
|
||||
### end Alembic commands ###
|
||||
|
||||
|
|
|
@ -1,47 +0,0 @@
|
|||
from flask import jsonify
|
||||
from flask import make_response
|
||||
import flask_jsonschema
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from collector.api.app import app
|
||||
|
||||
|
||||
@app.errorhandler(400)
|
||||
def bad_request(error):
|
||||
app.logger.error("Bad request: {}".format(error))
|
||||
return make_response(jsonify({'status': 'error', 'message': '{}'.format(error)}), 400)
|
||||
|
||||
|
||||
@app.errorhandler(IntegrityError)
|
||||
def integrity_error(error):
|
||||
app.logger.error("Bad request: {}".format(error))
|
||||
return make_response(jsonify({'status': 'error', 'message': '{}'.format(error)}), 400)
|
||||
|
||||
|
||||
@app.errorhandler(404)
|
||||
def not_found(error):
|
||||
app.logger.error("Not found: {}".format(error))
|
||||
return make_response(jsonify({'status': 'error', 'message': '{}'.format(error)}), 404)
|
||||
|
||||
|
||||
@app.errorhandler(405)
|
||||
def not_found(error):
|
||||
app.logger.error("Method not allowed: {}".format(error))
|
||||
return make_response(jsonify({'status': 'error', 'message': '{}'.format(error)}), 405)
|
||||
|
||||
|
||||
@app.errorhandler(flask_jsonschema.ValidationError)
|
||||
def validation_error(error):
|
||||
app.logger.error("Validation error: {}".format(error))
|
||||
return make_response(jsonify({'status': 'error', 'message': '{}'.format(error)}), 400)
|
||||
|
||||
|
||||
@app.errorhandler(500)
|
||||
def server_error(error):
|
||||
app.logger.error("Server error: {}".format(error))
|
||||
return make_response(jsonify({'status': 'error', 'message': '{0}: {1}'.format(error.__class__.__name__, error)}), 500)
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -7,9 +7,11 @@ from collector.api.app import app
|
|||
|
||||
def get_file_handler():
|
||||
if app.config.get('LOG_ROTATION'):
|
||||
file_handler = RotatingFileHandler(app.config.get('LOG_FILE'),
|
||||
maxBytes=app.config.get('LOG_FILE_SIZE'),
|
||||
backupCount='LOG_FILES_COUNT')
|
||||
file_handler = RotatingFileHandler(
|
||||
app.config.get('LOG_FILE'),
|
||||
maxBytes=app.config.get('LOG_FILE_SIZE'),
|
||||
backupCount='LOG_FILES_COUNT'
|
||||
)
|
||||
else:
|
||||
file_handler = FileHandler(app.config.get('LOG_FILE'))
|
||||
file_handler.setLevel(app.config.get('LOG_LEVEL'))
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
__all__ = ['action_logs', 'ping']
|
|
@ -1,3 +1,4 @@
|
|||
from flask import Blueprint
|
||||
from flask import request
|
||||
from flask_jsonschema import validate as validate_request
|
||||
from sqlalchemy import and_
|
||||
|
@ -5,20 +6,25 @@ from sqlalchemy import or_
|
|||
|
||||
from collector.api.app import app
|
||||
from collector.api.app import db
|
||||
from collector.api.db.model import ActionLog
|
||||
from collector.api.common import consts
|
||||
from collector.api.common import util
|
||||
from collector.api.common.util import db_transaction
|
||||
from collector.api.common.util import exec_time
|
||||
from collector.api.common.util import handle_response
|
||||
from collector.api.db.model import ActionLog
|
||||
|
||||
|
||||
@app.route('/api/v1/action_logs/', methods=['POST'])
|
||||
bp = Blueprint('action_logs', __name__, url_prefix='/api/v1/action_logs')
|
||||
|
||||
|
||||
@bp.route('/', methods=['POST'])
|
||||
@validate_request('action_logs', 'request')
|
||||
@handle_response(201, 'action_logs', 'response')
|
||||
@exec_time
|
||||
def post():
|
||||
app.logger.debug("Handling action_logs post request: {}".format(request.json))
|
||||
app.logger.debug(
|
||||
"Handling action_logs post request: {}".format(request.json)
|
||||
)
|
||||
action_logs = request.json['action_logs']
|
||||
app.logger.debug("Inserting {} action logs".format(len(action_logs)))
|
||||
objects_info = []
|
||||
|
@ -41,7 +47,7 @@ def _save_action_logs(objects_info, action_logs):
|
|||
'external_id': action_log['external_id'],
|
||||
'status': consts.ACTION_LOG_STATUSES.added
|
||||
})
|
||||
except:
|
||||
except Exception:
|
||||
app.logger.exception("Processing of action logs chunk failed")
|
||||
_handle_chunk_processing_error(objects_info, action_logs)
|
||||
|
||||
|
@ -69,7 +75,10 @@ def _separate_action_logs(action_logs):
|
|||
action_logs_idx = util.build_index(action_logs, 'node_aid', 'external_id')
|
||||
clauses = []
|
||||
for aid, ext_id in action_logs_idx.keys():
|
||||
clauses.append(and_(ActionLog.node_aid == aid, ActionLog.external_id == ext_id))
|
||||
clauses.append(and_(
|
||||
ActionLog.node_aid == aid,
|
||||
ActionLog.external_id == ext_id
|
||||
))
|
||||
found_objs = db.session.query(ActionLog).filter(or_(*clauses)).all()
|
||||
|
||||
for existed in found_objs:
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
from flask import Blueprint
|
||||
from flask import request
|
||||
from flask_jsonschema import validate as validate_request
|
||||
|
||||
|
@ -6,7 +7,10 @@ from collector.api.common.util import exec_time
|
|||
from collector.api.common.util import handle_response
|
||||
|
||||
|
||||
@app.route('/api/v1/ping/', methods=['GET'])
|
||||
bp = Blueprint('ping', __name__, url_prefix='/api/v1/ping')
|
||||
|
||||
|
||||
@bp.route('/', methods=['GET'])
|
||||
@validate_request('ping', 'request')
|
||||
@handle_response(200, 'ping', 'response')
|
||||
@exec_time
|
||||
|
|
|
@ -56,7 +56,8 @@ class DbTest(BaseTest):
|
|||
super(DbTest, self).setUp()
|
||||
|
||||
# Cleaning DB. It useful in case of tests failure
|
||||
directory = os.path.join(os.path.dirname(__file__), '..', 'api', 'db', 'migrations')
|
||||
directory = os.path.join(os.path.dirname(__file__),
|
||||
'..', 'api', 'db', 'migrations')
|
||||
with app.app_context():
|
||||
flask_migrate.downgrade(directory=directory)
|
||||
flask_migrate.upgrade(directory=directory)
|
||||
|
|
|
@ -52,7 +52,8 @@ class TestActionLogs(DbTest):
|
|||
|
||||
def test_post_duplication(self):
|
||||
node_aid = 'x'
|
||||
action_logs = [{'node_aid': node_aid, 'external_id': i} for i in xrange(100)]
|
||||
action_logs = [{'node_aid': node_aid, 'external_id': i}
|
||||
for i in xrange(100)]
|
||||
resp = self.post(
|
||||
'/api/v1/action_logs/',
|
||||
{'action_logs': action_logs}
|
||||
|
@ -69,9 +70,8 @@ class TestActionLogs(DbTest):
|
|||
self.assertEquals(len(action_logs), count_actual)
|
||||
|
||||
# Checking duplications is not added
|
||||
new_action_logs = [
|
||||
{'node_aid': node_aid, 'external_id': i} for i in xrange(len(action_logs) + 50)
|
||||
]
|
||||
new_action_logs = [{'node_aid': node_aid, 'external_id': i}
|
||||
for i in xrange(len(action_logs) + 50)]
|
||||
resp = self.post(
|
||||
'/api/v1/action_logs/',
|
||||
{'action_logs': action_logs + new_action_logs}
|
||||
|
|
Loading…
Reference in New Issue