Update patch set 3

Patch Set 3: Code-Review-1

(6 comments)

Patch-set: 3
Reviewer: Gerrit User 28715 <28715@4a232e18-c5a9-48ee-94c0-e04e7cca6543>
Label: Code-Review=-1, 3704e69be651940c00a756ded614dcd16cd6bcab
Attention: {"person_ident":"Gerrit User 28715 \u003c28715@4a232e18-c5a9-48ee-94c0-e04e7cca6543\u003e","operation":"REMOVE","reason":"\u003cGERRIT_ACCOUNT_28715\u003e replied on the change"}
Attention: {"person_ident":"Gerrit User 36436 \u003c36436@4a232e18-c5a9-48ee-94c0-e04e7cca6543\u003e","operation":"ADD","reason":"\u003cGERRIT_ACCOUNT_28715\u003e replied on the change"}
Gerrit User 28715 2024-01-30 14:44:44 +00:00 committed by Gerrit Code Review
parent e3cc146604
commit 724b3ab432
1 changed file with 142 additions and 0 deletions


@@ -0,0 +1,142 @@
{
"comments": [
{
"unresolved": true,
"key": {
"uuid": "cab17305_414c334c",
"filename": "/COMMIT_MSG",
"patchSetId": 3
},
"lineNbr": 7,
"author": {
"id": 28715
},
"writtenOn": "2024-01-30T14:44:44Z",
"side": 1,
"message": "Add 800.002 alarm for K8S cluster unavailable",
"range": {
"startLine": 7,
"startChar": 0,
"endLine": 7,
"endChar": 42
},
"revId": "021718688b014e905bb274643ee053bc40589880",
"serverId": "4a232e18-c5a9-48ee-94c0-e04e7cca6543"
},
{
"unresolved": true,
"key": {
"uuid": "2baa163c_4874d098",
"filename": "/COMMIT_MSG",
"patchSetId": 3
},
"lineNbr": 11,
"author": {
"id": 28715
},
"writtenOn": "2024-01-30T14:44:44Z",
"side": 1,
"message": "This adds 800.002 alarm to indicate K8S cluster unavailable due to\nunhealthy/failed kube-apiserver.",
"range": {
"startLine": 9,
"startChar": 0,
"endLine": 11,
"endChar": 51
},
"revId": "021718688b014e905bb274643ee053bc40589880",
"serverId": "4a232e18-c5a9-48ee-94c0-e04e7cca6543"
},
{
"unresolved": true,
"key": {
"uuid": "68199cd2_c9bd2100",
"filename": "fm-api/source/fm_api/constants.py",
"patchSetId": 3
},
"lineNbr": 312,
"author": {
"id": 28715
},
"writtenOn": "2024-01-30T14:44:44Z",
"side": 1,
"message": "Kubernetes cluster\n(not node).\nThere are multiple nodes in a cluster.",
"range": {
"startLine": 312,
"startChar": 0,
"endLine": 312,
"endChar": 31
},
"revId": "021718688b014e905bb274643ee053bc40589880",
"serverId": "4a232e18-c5a9-48ee-94c0-e04e7cca6543"
},
{
"unresolved": true,
"key": {
"uuid": "a954dffc_68093717",
"filename": "fm-doc/fm_doc/events.yaml",
"patchSetId": 3
},
"lineNbr": 3437,
"author": {
"id": 28715
},
"writtenOn": "2024-01-30T14:44:44Z",
"side": 1,
"message": "K8S cluster unreachable",
"range": {
"startLine": 3436,
"startChar": 16,
"endLine": 3437,
"endChar": 44
},
"revId": "021718688b014e905bb274643ee053bc40589880",
"serverId": "4a232e18-c5a9-48ee-94c0-e04e7cca6543"
},
{
"unresolved": true,
"key": {
"uuid": "c056dab9_d2d664fc",
"filename": "fm-doc/fm_doc/events.yaml",
"patchSetId": 3
},
"lineNbr": 3439,
"author": {
"id": 28715
},
"writtenOn": "2024-01-30T14:44:44Z",
"side": 1,
"message": "Need to work on this what the recovery operation actually is in this case.\nI don\u0027t think restarting kubelet.service (per node), or deleting kube-apiserver pods will work.. TBD.\n\nPerhaps it is only:\nIf problem persists, contact next level of support.",
"range": {
"startLine": 3439,
"startChar": 29,
"endLine": 3439,
"endChar": 55
},
"revId": "021718688b014e905bb274643ee053bc40589880",
"serverId": "4a232e18-c5a9-48ee-94c0-e04e7cca6543"
},
{
"unresolved": true,
"key": {
"uuid": "4ac1686a_6f2abeed",
"filename": "fm-doc/fm_doc/events.yaml",
"patchSetId": 3
},
"lineNbr": 3448,
"author": {
"id": 28715
},
"writtenOn": "2024-01-30T14:44:44Z",
"side": 1,
"message": "not sure what probable cause.. perhaps there is better choice for this.",
"range": {
"startLine": 3446,
"startChar": 0,
"endLine": 3448,
"endChar": 0
},
"revId": "021718688b014e905bb274643ee053bc40589880",
"serverId": "4a232e18-c5a9-48ee-94c0-e04e7cca6543"
}
]
}
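
For readers following the comments above: below is a minimal, hypothetical Python sketch of the kind of addition to fm-api/source/fm_api/constants.py that the comment on line 312 is reacting to. The alarm ID 800.002 and its meaning ("K8S cluster unavailable" due to an unhealthy/failed kube-apiserver) come from the review comments; the constant names are assumptions for illustration only, not the identifiers in the change under review.

# Hypothetical sketch only: constant names below are assumed, not taken
# from the actual change under review.
# What the comments establish: alarm ID 800.002 signals that the
# Kubernetes *cluster* (which contains multiple nodes, not a single
# node) is unavailable because kube-apiserver is unhealthy or failed.
FM_ALARM_ID_K8S_CLUSTER_DOWN = "800.002"    # assumed name
FM_ALARM_K8S_CLUSTER_DOWN_TEXT = (          # assumed name
    "Kubernetes cluster unavailable: kube-apiserver unhealthy or failed"
)

The proposed repair action and probable cause fields in fm-doc/fm_doc/events.yaml are still open questions in the comments above and are deliberately not sketched here.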