Add ability to get jobs for all projects

This allows scheduling backups of Cinder volumes using
centralized instance(s) of freezer-scheduler.

Change-Id: I06180e59ec5ae89d028577fe3eeb8682ae603ef2
This commit is contained in:
Volodymyr Mevsha
2025-02-26 16:44:54 +02:00
parent e0ecbad03a
commit 63e6f14066
10 changed files with 117 additions and 17 deletions

View File

@@ -42,6 +42,8 @@
#"admin_or_owner": "is_admin:True or project_id:%(project_id)s"
#"admin_or_service": "role:admin or role:service"
# Create client entry.
# POST /v1/clients
#"clients:create": ""
@@ -74,6 +76,10 @@
# GET /v1/jobs
#"jobs:get_all": ""
# Lists all projects jobs.
# GET /v1/jobs
#"jobs:get_all_projects": "rule:admin_or_service"
# Updates jobs.
# PATCH /v1/jobs/{job_id}
#"jobs:update": ""

View File

@@ -73,15 +73,19 @@ class JobsCollectionResource(JobsBaseResource):
"""
@policy.enforce('jobs:get_all')
def on_get(self, req, resp, project_id):
def on_get(self, req, resp, project_id, all_projects=False):
# GET /v2/{project_id}/jobs(?limit,offset) Lists jobs
user_id = req.get_header('X-User-ID')
offset = req.get_param_as_int('offset') or 0
limit = req.get_param_as_int('limit') or 10
search = self.json_body(req)
if all_projects:
policy.can('jobs:get_all_projects', req.env['freezer.context'])
obj_list = self.db.search_job(project_id=project_id,
user_id=user_id, offset=offset,
limit=limit, search=search)
user_id=user_id,
all_projects=all_projects,
offset=offset, limit=limit,
search=search)
resp.media = {'jobs': obj_list}
@policy.enforce('jobs:create')

View File

@@ -18,6 +18,7 @@
from oslo_policy import policy
UNPROTECTED = ''
ADMIN_OR_SERVICE = 'rule:admin_or_service'
rules = [
policy.RuleDefault(
@@ -25,7 +26,10 @@ rules = [
"role:admin"),
policy.RuleDefault(
"admin_or_owner",
"is_admin:True or project_id:%(project_id)s")
"is_admin:True or project_id:%(project_id)s"),
policy.RuleDefault(
"admin_or_service",
"role:admin or role:service"),
]

View File

@@ -62,6 +62,17 @@ rules = [
}
]
),
policy.DocumentedRuleDefault(
name=JOBS % 'get_all_projects',
check_str=base.ADMIN_OR_SERVICE,
description='Lists all projects jobs.',
operations=[
{
'path': '/v1/jobs',
'method': 'GET'
}
]
),
policy.DocumentedRuleDefault(
name=JOBS % 'update',
check_str=base.UNPROTECTED,

View File

@@ -233,15 +233,18 @@ def replace_tuple(tablename, user_id, tuple_id, tuple_values, project_id=None):
return tuple_id
def search_tuple(tablename, user_id, project_id=None, offset=0,
limit=100, search=None):
def search_tuple(tablename, user_id, project_id=None, all_projects=False,
offset=0, limit=100, search=None):
search = valid_and_get_search_option(search=search)
session = get_db_session()
if all_projects:
project_id = None
with session.begin():
try:
# TODO(gecong) search will be implemented in the future
query = model_query(session, tablename, project_id=project_id)
query = query.filter_by(user_id=user_id)
if not all_projects:
query = query.filter_by(user_id=user_id)
# If search option isn't valid or set, we use limit and offset
# in sqlalchemy level
if len(search) == 0:
@@ -768,12 +771,14 @@ def get_job(user_id, job_id, project_id=None):
return values
def search_job(user_id, project_id=None, offset=0,
def search_job(user_id, project_id=None, all_projects=False, offset=0,
limit=100, search=None):
jobs = []
result, search_key = search_tuple(tablename=models.Job, user_id=user_id,
project_id=project_id, offset=offset,
limit=limit, search=search)
project_id=project_id,
all_projects=all_projects,
offset=offset, limit=limit,
search=search)
for job in result:
jobmap = {}
jobmap['job_id'] = job.get('id')

View File

@@ -34,14 +34,18 @@ def setup_policy(conf):
ENFORCER.load_rules()
def can(rule, ctx):
ENFORCER.enforce(rule, {}, ctx.to_dict(), do_raise=True,
exc=exceptions.AccessForbidden)
def enforce(rule):
def decorator(func):
@functools.wraps(func)
def handler(*args, **kwargs):
ctx = args[1].env['freezer.context']
ENFORCER.enforce(rule, {}, ctx.to_dict(), do_raise=True,
exc=exceptions.AccessForbidden)
can(rule, ctx)
return func(*args, **kwargs)
return handler

View File

@@ -37,13 +37,16 @@ class TypeManagerV2(object):
self.doc_type = doc_type
@staticmethod
def get_base_search_filter(project_id, user_id=None, search=None):
def get_base_search_filter(project_id, user_id=None, all_projects=False,
search=None):
search = search or {}
project_id_filter = {"term": {"project_id": project_id}}
base_filter = [project_id_filter]
if user_id:
user_id_filter = {"term": {"user_id": user_id}}
base_filter.append(user_id_filter)
if all_projects:
base_filter = []
match_list = [{"match": m} for m in search.get('match', [])]
match_not_list = [{"match": m} for m in search.get('match_not', [])]
@@ -52,12 +55,14 @@ class TypeManagerV2(object):
return base_filter
@staticmethod
def get_search_query(project_id, doc_id, user_id=None, search=None):
def get_search_query(project_id, doc_id, user_id=None, all_projects=False,
search=None):
search = search or {}
try:
base_filter = TypeManagerV2.get_base_search_filter(
project_id=project_id,
user_id=user_id,
all_projects=all_projects,
search=search
)
query_filter = {"filter": {"bool": {"must": base_filter}}}
@@ -90,13 +95,14 @@ class TypeManagerV2(object):
doc['_version'] = res['_version']
return doc
def search(self, project_id, user_id=None, doc_id=None, search=None,
offset=0, limit=10):
def search(self, project_id, user_id=None, doc_id=None, all_projects=False,
search=None, offset=0, limit=10):
search = search or {}
query_dsl = self.get_search_query(
project_id=project_id,
user_id=user_id,
doc_id=doc_id,
all_projects=all_projects,
search=search
)
try:
@@ -436,10 +442,12 @@ class ElasticSearchEngineV2(object):
doc_id=job_id
)
def search_job(self, project_id, user_id, offset=0, limit=10, search=None):
def search_job(self, project_id, user_id, all_projects=False,
offset=0, limit=10, search=None):
search = search or {}
return self.job_manager.search(project_id=project_id,
user_id=user_id,
all_projects=all_projects,
search=search,
offset=offset,
limit=limit)

View File

@@ -355,6 +355,18 @@ def get_fake_job_id():
return uuid.uuid4().hex
def get_fake_job(project_id=None, user_id=None, client_id=None):
    """Return a deep copy of ``fake_job_3`` with a fresh job_id.

    Each of *user_id*, *project_id* and *client_id* overrides the
    corresponding field of the fixture when a truthy value is given.
    """
    job = copy.deepcopy(fake_job_3)
    overrides = (('user_id', user_id),
                 ('project_id', project_id),
                 ('client_id', client_id))
    for field, value in overrides:
        if value:
            job[field] = value
    # Always give the fixture a unique id so tests don't collide.
    job['job_id'] = get_fake_job_id()
    return job
fake_job_0_elasticsearch_found = {
"_id": "e7181e5e-2c75-43f8-92c0-c037ae5f11e4",
"_index": "freezer",
@@ -481,6 +493,17 @@ def get_fake_client_id():
return uuid.uuid4().hex
def get_fake_client(project_id=None, user_id=None):
    """Return a deep copy of ``fake_client_entry_job_3`` with a fresh client_id.

    *project_id* overrides the nested client's project; *user_id* overrides
    both the top-level and the nested client's user field.
    """
    entry = copy.deepcopy(fake_client_entry_job_3)
    inner = entry['client']
    # Unique id per call so concurrent tests don't clash on fixtures.
    inner['client_id'] = get_fake_client_id()
    if project_id:
        inner['project_id'] = project_id
    if user_id:
        entry['user_id'] = user_id
        inner['user_id'] = user_id
    return entry
fake_action_0 = {
"freezer_action":
{

View File

@@ -20,6 +20,7 @@
import copy
from unittest import mock
from unittest.mock import patch
from uuid import uuid4
from freezer_api.common import exceptions as freezer_api_exc
from freezer_api.tests.unit import common
@@ -193,6 +194,38 @@ class DbJobTestCase(base.DbTestCase):
jobmap = result[index]
self.assertEqual(jobids[index], jobmap['job_id'])
def test_job_list_all_projects_without_search(self):
    """Jobs created under many projects all show up when all_projects=True."""
    expected = {}
    user_id = None
    for x in range(0, 9):
        project_id = f"tjl-project-{x}"
        user_id = str(uuid4())
        client_doc = common.get_fake_client(project_id, user_id)
        self.dbapi.add_client(
            project_id=project_id,
            user_id=user_id,
            doc=client_doc['client'],
        )
        job_doc = common.get_fake_job(
            project_id, user_id, client_doc['client']['client_id'])
        job_id = self.dbapi.add_job(user_id=user_id,
                                    doc=job_doc,
                                    project_id=project_id)
        self.assertIsNotNone(job_id)
        expected[job_id] = project_id
    # user_id here is from the last iteration; the user filter is
    # bypassed when all_projects is set, so any valid user works.
    result = self.dbapi.search_job(
        project_id=self.fake_project_id,
        user_id=user_id,
        all_projects=True,
        offset=0,
        limit=1000,
    )
    self.assertIsNotNone(result)
    # Tick off every one of our jobs; unrelated jobs are ignored.
    for job in result:
        expected.pop(job['job_id'], None)
    self.assertEqual(0, len(expected))
def test_job_list_with_search_match_and_match_not(self):
count = 0
jobids = []

View File

@@ -1033,6 +1033,7 @@ class TestElasticSearchEngine_job(common.FreezerBaseTestCase, ElasticSearchDB):
self.eng.job_manager.search.assert_called_with(
project_id='tecs',
user_id=common.fake_job_0['user_id'],
all_projects=False,
search=my_search,
limit=15, offset=6)
@@ -1048,6 +1049,7 @@ class TestElasticSearchEngine_job(common.FreezerBaseTestCase, ElasticSearchDB):
self.eng.job_manager.search.assert_called_with(
project_id='tecs',
user_id=common.fake_job_0['user_id'],
all_projects=False,
search=my_search,
limit=15, offset=6)