Browse Source

Some polish for APIv2

- update_keypair now only in v2 schema
- tenant_id->project_id in cluster provision steps
- tenant_id->project_id in referenced job binaries in job templates
- proper check for job template existence, to fail early (as intended)
- hadoop_version->plugin_version for query string filter
- unbreak some data source stuff (related to tenant_id->project_id)
- fix omission of show_progress from cluster GET query string whitelist
- job_id->job_template_id for v2 jobs
- add missing release note info for strict query string checking
- release notes for all the rest

Change-Id: Idea117c406b5ab9b8d85ccf8adb175053416d6ff
Story: 2004505
Task: 28822
tags/10.0.0.0b1
Jeremy Freudberg 5 months ago
parent
commit
69d74c1a66

+ 12
- 0
releasenotes/notes/some-polish-api-v2-2d2e390a74b088f9.yaml View File

@@ -0,0 +1,12 @@
1
+---
2
+other:
3
+  - Some refinements to APIv2 have been made in an effort to bring it from
4
+    experimental (and therefore evolving and unpredictable) to stable. More
5
+    instances of `tenant_id` have been changed to `project_id`, in the
6
+    cluster and job template APIs. `job_id` was changed to `job_template_id`
7
+    in the job API. The newly-minted query string validation feature has been
8
+    fixed to allow `show_progress` as a parameter on cluster GET; on a similar
9
+    note some APIv2 endpoints which previously could be filtered by
10
+    `hadoop_version` are now filtered by `plugin_version` instead. Also, the
11
+    schema for cluster PATCH in APIv1.1 no longer includes the key
12
+    `update_keypair`; its prior inclusion was a mistake.

+ 5
- 0
releasenotes/notes/strict-validation-query-string-a6cadbf2f9c57d06.yaml View File

@@ -0,0 +1,5 @@
1
+---
2
+other:
3
+  - In APIv2 there is now strict checking of parameters in the query string.
4
+    This means that unexpected parameters in the query string will give a 400
5
+    error (as opposed to previously being ignored, or causing a 500 error).

+ 6
- 2
sahara/api/v2/cluster_templates.py View File

@@ -29,9 +29,13 @@ rest = u.RestV2('cluster-templates', __name__)
29 29
 @v.check_exists(api.get_cluster_template, 'marker')
30 30
 @v.validate(None, v.validate_pagination_limit,
31 31
             v.validate_sorting_cluster_templates)
32
-@v.validate_request_params(['plugin_name', 'hadoop_version', 'name'])
32
+@v.validate_request_params(['plugin_name', 'plugin_version', 'name'])
33 33
 def cluster_templates_list():
34
-    result = api.get_cluster_templates(**u.get_request_args().to_dict())
34
+    request_args = u.get_request_args().to_dict()
35
+    if 'plugin_version' in request_args:
36
+        request_args['hadoop_version'] = request_args['plugin_version']
37
+        del request_args['plugin_version']
38
+    result = api.get_cluster_templates(**request_args)
35 39
     for ct in result:
36 40
         u._replace_hadoop_version_plugin_version(ct)
37 41
         u._replace_tenant_id_project_id(ct)

+ 19
- 4
sahara/api/v2/clusters.py View File

@@ -27,16 +27,28 @@ import sahara.utils.api as u
27 27
 rest = u.RestV2('clusters', __name__)
28 28
 
29 29
 
30
+def _replace_tenant_id_project_id_provision_steps(c):
31
+    if 'provision_progress' in c:
32
+        for step in c['provision_progress']:
33
+            dict.update(step, {'project_id': step['tenant_id']})
34
+            dict.pop(step, 'tenant_id')
35
+
36
+
30 37
 @rest.get('/clusters')
31 38
 @acl.enforce("data-processing:clusters:get_all")
32 39
 @v.check_exists(api.get_cluster, 'marker')
33 40
 @v.validate(None, v.validate_pagination_limit)
34
-@v.validate_request_params(['plugin_name', 'hadoop_version', 'name'])
41
+@v.validate_request_params(['plugin_name', 'plugin_version', 'name'])
35 42
 def clusters_list():
36
-    result = api.get_clusters(**u.get_request_args().to_dict())
43
+    request_args = u.get_request_args().to_dict()
44
+    if 'plugin_version' in request_args:
45
+        request_args['hadoop_version'] = request_args['plugin_version']
46
+        del request_args['plugin_version']
47
+    result = api.get_clusters(**request_args)
37 48
     for c in result:
38 49
         u._replace_hadoop_version_plugin_version(c)
39 50
         u._replace_tenant_id_project_id(c)
51
+        _replace_tenant_id_project_id_provision_steps(c)
40 52
     return u.render(res=result, name='clusters')
41 53
 
42 54
 
@@ -73,13 +85,14 @@ def clusters_scale(cluster_id, data):
73 85
         api.scale_cluster, cluster_id, data)
74 86
     u._replace_hadoop_version_plugin_version(result['cluster'])
75 87
     u._replace_tenant_id_project_id(result['cluster'])
88
+    _replace_tenant_id_project_id_provision_steps(result['cluster'])
76 89
     return u.render(result)
77 90
 
78 91
 
79 92
 @rest.get('/clusters/<cluster_id>')
80 93
 @acl.enforce("data-processing:clusters:get")
81 94
 @v.check_exists(api.get_cluster, 'cluster_id')
82
-@v.validate_request_params([])
95
+@v.validate_request_params(['show_progress'])
83 96
 def clusters_get(cluster_id):
84 97
     data = u.get_request_args()
85 98
     show_events = six.text_type(
@@ -88,19 +101,21 @@ def clusters_get(cluster_id):
88 101
         api.get_cluster, cluster_id, show_events)
89 102
     u._replace_hadoop_version_plugin_version(result['cluster'])
90 103
     u._replace_tenant_id_project_id(result['cluster'])
104
+    _replace_tenant_id_project_id_provision_steps(result['cluster'])
91 105
     return u.render(result)
92 106
 
93 107
 
94 108
 @rest.patch('/clusters/<cluster_id>')
95 109
 @acl.enforce("data-processing:clusters:modify")
96 110
 @v.check_exists(api.get_cluster, 'cluster_id')
97
-@v.validate(v_c_schema.CLUSTER_UPDATE_SCHEMA, v_c.check_cluster_update)
111
+@v.validate(v_c_schema.CLUSTER_UPDATE_SCHEMA_V2, v_c.check_cluster_update)
98 112
 @v.validate_request_params([])
99 113
 def clusters_update(cluster_id, data):
100 114
     result = u.to_wrapped_dict_no_render(
101 115
         api.update_cluster, cluster_id, data)
102 116
     u._replace_hadoop_version_plugin_version(result['cluster'])
103 117
     u._replace_tenant_id_project_id(result['cluster'])
118
+    _replace_tenant_id_project_id_provision_steps(result['cluster'])
104 119
     return u.render(result)
105 120
 
106 121
 

+ 4
- 4
sahara/api/v2/data_sources.py View File

@@ -52,9 +52,9 @@ def data_source_register(data):
52 52
 @v.check_exists(api.get_data_source, 'data_source_id')
53 53
 @v.validate_request_params([])
54 54
 def data_source_get(data_source_id):
55
-    result = u.to_wrapped_dict(api.get_data_source, data_source_id)
55
+    result = api.get_data_source(data_source_id).to_wrapped_dict()
56 56
     u._replace_tenant_id_project_id(result['data_source'])
57
-    return result
57
+    return u.render(result)
58 58
 
59 59
 
60 60
 @rest.delete('/data-sources/<data_source_id>')
@@ -72,6 +72,6 @@ def data_source_delete(data_source_id):
72 72
 @v.validate(v_d_s_schema.DATA_SOURCE_UPDATE_SCHEMA)
73 73
 @v.validate_request_params([])
74 74
 def data_source_update(data_source_id, data):
75
-    result = u.to_wrapped_dict(api.data_source_update, data_source_id, data)
75
+    result = api.data_source_update(data_source_id, data).to_wrapped_dict()
76 76
     u._replace_tenant_id_project_id(result['data_source'])
77
-    return result
77
+    return u.render(result)

+ 18
- 4
sahara/api/v2/job_templates.py View File

@@ -24,9 +24,15 @@ import sahara.utils.api as u
24 24
 rest = u.RestV2('job-templates', __name__)
25 25
 
26 26
 
27
+def _replace_tenant_id_project_id_job_binary(jb_list):
28
+    for jb_obj in jb_list:
29
+        dict.update(jb_obj, {'project_id': jb_obj['tenant_id']})
30
+        dict.pop(jb_obj, 'tenant_id')
31
+
32
+
27 33
 @rest.get('/job-templates')
28 34
 @acl.enforce("data-processing:job-templates:get_all")
29
-@v.check_exists(api.get_job_templates, 'marker')
35
+@v.check_exists(api.get_job_template, 'marker')
30 36
 @v.validate(None, v.validate_pagination_limit,
31 37
             v.validate_sorting_jobs)
32 38
 @v.validate_request_params(['type', 'name'])
@@ -34,6 +40,8 @@ def job_templates_list():
34 40
     result = api.get_job_templates(**u.get_request_args().to_dict())
35 41
     for jt in result:
36 42
         u._replace_tenant_id_project_id(jt)
43
+        _replace_tenant_id_project_id_job_binary(jt['mains'])
44
+        _replace_tenant_id_project_id_job_binary(jt['libs'])
37 45
     return u.render(res=result, name='job_templates')
38 46
 
39 47
 
@@ -44,35 +52,41 @@ def job_templates_list():
44 52
 def job_templates_create(data):
45 53
     result = {'job_template': api.create_job_template(data).to_dict()}
46 54
     u._replace_tenant_id_project_id(result['job_template'])
55
+    _replace_tenant_id_project_id_job_binary(result['job_template']['mains'])
56
+    _replace_tenant_id_project_id_job_binary(result['job_template']['libs'])
47 57
     return u.render(result)
48 58
 
49 59
 
50 60
 @rest.get('/job-templates/<job_templates_id>')
51 61
 @acl.enforce("data-processing:job-templates:get")
52
-@v.check_exists(api.get_job_templates, id='job_templates_id')
62
+@v.check_exists(api.get_job_template, id='job_templates_id')
53 63
 @v.validate_request_params([])
54 64
 def job_templates_get(job_templates_id):
55 65
     result = {'job_template': api.get_job_template(
56 66
         job_templates_id).to_dict()}
57 67
     u._replace_tenant_id_project_id(result['job_template'])
68
+    _replace_tenant_id_project_id_job_binary(result['job_template']['mains'])
69
+    _replace_tenant_id_project_id_job_binary(result['job_template']['libs'])
58 70
     return u.render(result)
59 71
 
60 72
 
61 73
 @rest.patch('/job-templates/<job_templates_id>')
62 74
 @acl.enforce("data-processing:jobs:modify")
63
-@v.check_exists(api.get_job_templates, id='job_templates_id')
75
+@v.check_exists(api.get_job_template, id='job_templates_id')
64 76
 @v.validate(v_j_schema.JOB_UPDATE_SCHEMA)
65 77
 @v.validate_request_params([])
66 78
 def job_templates_update(job_templates_id, data):
67 79
     result = {'job_template': api.update_job_template(
68 80
         job_templates_id, data).to_dict()}
69 81
     u._replace_tenant_id_project_id(result['job_template'])
82
+    _replace_tenant_id_project_id_job_binary(result['job_template']['mains'])
83
+    _replace_tenant_id_project_id_job_binary(result['job_template']['libs'])
70 84
     return u.render(result)
71 85
 
72 86
 
73 87
 @rest.delete('/job-templates/<job_templates_id>')
74 88
 @acl.enforce("data-processing:jobs:delete")
75
-@v.check_exists(api.get_job_templates, id='job_templates_id')
89
+@v.check_exists(api.get_job_template, id='job_templates_id')
76 90
 @v.validate_request_params([])
77 91
 def job_templates_delete(job_templates_id):
78 92
     api.delete_job_template(job_templates_id)

+ 6
- 3
sahara/api/v2/job_types.py View File

@@ -24,10 +24,13 @@ rest = u.RestV2('job-types', __name__)
24 24
 
25 25
 @rest.get('/job-types')
26 26
 @acl.enforce("data-processing:job-types:get_all")
27
-@v.validate_request_params(['type', 'plugin_name', 'hadoop_version'])
27
+@v.validate_request_params(['type', 'plugin_name', 'plugin_version'])
28 28
 def job_types_get():
29 29
     # We want to use flat=False with to_dict() so that
30 30
     # the value of each arg is given as a list. This supports
31 31
     # filters of the form ?type=Pig&type=Java, etc.
32
-    return u.render(job_types=api.get_job_types(
33
-        **u.get_request_args().to_dict(flat=False)))
32
+    request_args = u.get_request_args().to_dict(flat=False)
33
+    if 'plugin_version' in request_args:
34
+        request_args['hadoop_version'] = request_args['plugin_version']
35
+        del request_args['plugin_version']
36
+    return u.render(job_types=api.get_job_types(**request_args))

+ 9
- 0
sahara/api/v2/jobs.py View File

@@ -26,6 +26,11 @@ import sahara.utils.api as u
26 26
 rest = u.RestV2('jobs', __name__)
27 27
 
28 28
 
29
+def _replace_job_id_job_template_id(job_obj):
30
+    dict.update(job_obj, {'job_template_id': job_obj['job_id']})
31
+    dict.pop(job_obj, 'job_id')
32
+
33
+
29 34
 @rest.get('/jobs')
30 35
 @acl.enforce("data-processing:job-executions:get_all")
31 36
 @v.check_exists(api.get_job_execution, 'marker')
@@ -40,6 +45,7 @@ def jobs_list():
40 45
     for je in result:
41 46
         je.pop('oozie_job_id', force=True)
42 47
         u._replace_tenant_id_project_id(je)
48
+        _replace_job_id_job_template_id(je)
43 49
     return u.render(res=result, name='jobs')
44 50
 
45 51
 
@@ -53,6 +59,7 @@ def jobs_execute(data):
53 59
                 {'engine_job_id': result['job']['oozie_job_id']})
54 60
     dict.pop(result['job'], 'oozie_job_id')
55 61
     u._replace_tenant_id_project_id(result['job'])
62
+    _replace_job_id_job_template_id(result['job'])
56 63
     return u.render(result)
57 64
 
58 65
 
@@ -67,6 +74,7 @@ def jobs_get(job_id):
67 74
     result = {'job': api.get_job_execution(job_id, refresh_status)}
68 75
     result['job'].pop('oozie_job_id', force=True)
69 76
     u._replace_tenant_id_project_id(result['job'])
77
+    _replace_job_id_job_template_id(result['job'])
70 78
     return u.render(result)
71 79
 
72 80
 
@@ -80,6 +88,7 @@ def jobs_update(job_id, data):
80 88
     result = {'job': api.update_job_execution(job_id, data)}
81 89
     result['job'].pop('oozie_job_id', force=True)
82 90
     u._replace_tenant_id_project_id(result['job'])
91
+    _replace_job_id_job_template_id(result['job'])
83 92
     return u.render(result)
84 93
 
85 94
 

+ 6
- 2
sahara/api/v2/node_group_templates.py View File

@@ -30,9 +30,13 @@ rest = u.RestV2('node-group-templates', __name__)
30 30
 @v.check_exists(api.get_node_group_template, 'marker')
31 31
 @v.validate(None, v.validate_pagination_limit,
32 32
             v.validate_sorting_node_group_templates)
33
-@v.validate_request_params(['plugin_name', 'hadoop_version', 'name'])
33
+@v.validate_request_params(['plugin_name', 'plugin_version', 'name'])
34 34
 def node_group_templates_list():
35
-    result = api.get_node_group_templates(**u.get_request_args().to_dict())
35
+    request_args = u.get_request_args().to_dict()
36
+    if 'plugin_version' in request_args:
37
+        request_args['hadoop_version'] = request_args['plugin_version']
38
+        del request_args['plugin_version']
39
+    result = api.get_node_group_templates(**request_args)
36 40
     for ngt in result:
37 41
         u._replace_hadoop_version_plugin_version(ngt)
38 42
         u._replace_tenant_id_project_id(ngt)

+ 5
- 3
sahara/service/validations/clusters_schema.py View File

@@ -71,9 +71,6 @@ CLUSTER_UPDATE_SCHEMA = {
71 71
         "description": {
72 72
             "type": ["string", "null"]
73 73
         },
74
-        "update_keypair": {
75
-            "type": ["boolean", "null"]
76
-        },
77 74
         "name": {
78 75
             "type": "string",
79 76
             "minLength": 1,
@@ -99,6 +96,11 @@ CLUSTER_UPDATE_SCHEMA = {
99 96
     "additionalProperties": False,
100 97
     "required": []
101 98
 }
99
+CLUSTER_UPDATE_SCHEMA_V2 = copy.deepcopy(CLUSTER_UPDATE_SCHEMA)
100
+CLUSTER_UPDATE_SCHEMA_V2['properties'].update({
101
+    "update_keypair": {
102
+        "type": ["boolean", "null"]
103
+    }})
102 104
 
103 105
 CLUSTER_SCALING_SCHEMA = {
104 106
     "type": "object",

Loading…
Cancel
Save