Add CguHandler, DmesgWatcher, GnssMonitor classes

The CguHandler class reads a given ts2phc config file and uses it to
derive the nmea_serialport, PCI address and CGU path. Any of these
values can also be supplied up front to short-circuit the lookup when
they are already known. The CGU output is then read and parsed into a
dict so that the status of the various pins on the NIC can be derived
easily.
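
For illustration, a minimal usage sketch of the class (the config file
path is made up; pass nmea_serialport, pci_addr or cgu_path to the
constructor instead to skip the corresponding lookup):

    from trackingfunctionsdk.common.helpers.cgu_handler import CguHandler

    handler = CguHandler("/etc/ts2phc.conf")               # illustrative path
    handler.get_gnss_nmea_serialport_from_ts2phc_config()  # nmea_serialport
    handler.convert_nmea_serialport_to_pci_addr()          # pci_addr via dmesg
    handler.get_cgu_path_from_pci_addr()                   # cgu_path
    handler.read_cgu()                                     # cgu_output_raw
    handler.cgu_output_to_dict()                           # cgu_output_parsed
    print(handler.cgu_output_parsed['PPS DPLL']['Status']) # e.g. 'locked_ho_ack'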

DmesgWatcher and GnssMonitor use an observer pattern. DmesgWatcher is
the subject: it follows entries in a dmesg log, looking for patterns
that the GnssMonitor observers care about, and notifies those observers
with the matched entry so that GnssMonitor can parse and handle the
update as required. DmesgWatcher can be extended to support other
observer types in the future.
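
The wiring looks roughly like this (mirroring how the daemon in this
change uses it; the config file path is illustrative):

    import threading
    from trackingfunctionsdk.common.helpers.dmesg_watcher import DmesgWatcher
    from trackingfunctionsdk.common.helpers.gnss_monitor import GnssMonitor

    watcher = DmesgWatcher()            # subject, follows /logs/kern.log
    observers = [GnssMonitor(conf) for conf in ["/etc/ts2phc.conf"]]
    for observer in observers:
        watcher.attach(observer)
    # run_watcher() loops forever, so run it on its own thread
    threading.Thread(target=watcher.run_watcher).start()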

GnssMonitor attaches to DmesgWatcher and handles changes in GNSS
status. This includes sending the status change to a publisher, which
will be implemented in a future review.
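
Other observer types can be plugged into DmesgWatcher the same way; a
hypothetical sketch (the SmaMonitor class and the pin it watches are
made up for illustration):

    from trackingfunctionsdk.common.helpers.gnss_monitor import Observer

    class SmaMonitor(Observer):
        def __init__(self, pci_addr):
            # DmesgWatcher matches log entries against these two values
            # before notifying the observer.
            self.dmesg_values_to_check = {'pin': 'SMA1',
                                          'pci_addr': pci_addr}

        def update(self, subject, matched_line) -> None:
            # Called by DmesgWatcher.notify() with the matching line.
            print("SMA event: %s" % matched_line)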

Unit tests are included for CguHandler and DmesgWatcher. GnssMonitor
is also exercised by these tests, since it is built on the
functionality of CguHandler.

Tox has been updated to run the unit tests automatically via a new
py36 environment (runnable locally with "tox -e py36").

Testing:

Pass: Unit tests pass

Story: 2010056
Task: 45500

Signed-off-by: Cole Walker <cole.walker@windriver.com>
Change-Id: I4be477aa0fce8baa418a3ff450c6b998683ec10b
Cole Walker 2022-05-27 12:46:27 -04:00
parent d0b5a1b644
commit 04bbd6f0ff
21 changed files with 769 additions and 220 deletions


@@ -8,10 +8,12 @@
jobs:
- openstack-tox-linters
- k8sapp-ptp-notification-tox-pylint
- ptp-notification-tox-py36
gate:
jobs:
- openstack-tox-linters
- k8sapp-ptp-notification-tox-pylint
- ptp-notification-tox-py36
post:
jobs:
- stx-ptp-notification-armada-app-upload-git-mirror
@@ -29,6 +31,15 @@
secret: stx-ptp-notification-armada-app-github-secret
pass-to-parent: true
- job:
name: ptp-notification-tox-py36
parent: tox-py36
description: |
Run py36 test for ptp-notification
nodeset: ubuntu-bionic
vars:
tox_envlist: py36
- job:
name: k8sapp-ptp-notification-tox-pylint
parent: tox


@@ -0,0 +1,113 @@
#
# Copyright (c) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import logging
import os
import re
import sys
from trackingfunctionsdk.common.helpers import constants
from trackingfunctionsdk.common.helpers import log_helper
LOG = logging.getLogger(__name__)
log_helper.config_logger(LOG)
class CguHandler:
def __init__(self, config_file, nmea_serialport=None, pci_addr=None, cgu_path=None):
self.config_file = config_file
self.nmea_serialport = nmea_serialport
self.pci_addr = pci_addr
self.cgu_path = cgu_path
self.cgu_output_raw = ""
self.cgu_output_parsed = {}
def get_gnss_nmea_serialport_from_ts2phc_config(self):
# Read a ts2phc config file and return the ts2phc.nmea_serialport
nmea_serialport = None
try:
with open(self.config_file, 'r') as infile:
for line in infile:
if constants.NMEA_SERIALPORT in line:
nmea_serialport = line.split(' ')[1].strip('\n')
break
self.nmea_serialport = nmea_serialport
return
except (FileNotFoundError, PermissionError) as err:
LOG.error(err)
raise
def convert_nmea_serialport_to_pci_addr(self, dmesg_path="/logs/dmesg"):
# Parse the nmea_serialport value into a PCI address so that we can later find the cgu
# Returns the address or None
pci_addr = None
# Get only the ttyGNSS_1800_0 portion of the path
nmea_serialport = self.nmea_serialport.split('/')[2]
LOG.debug("Looking for nmea_serialport value: %s" % nmea_serialport)
with open(dmesg_path, 'r') as dmesg:
for line in dmesg:
if nmea_serialport in line:
# Regex split to make any number of spaces the delimiter
# Eg. [ 4.834255] ice 0000:18:00.0: ttyGNSS_1800_0 registered
# Becomes: 0000:18:00.0
pci_addr = re.split(' +', line)[3].strip(':')
self.pci_addr = pci_addr
return
def get_cgu_path_from_pci_addr(self):
# Search for a cgu file using the given pci address
cgu_path = "/ice/" + self.pci_addr + "/cgu"
if os.path.exists(cgu_path):
LOG.debug("PCI address %s has cgu path %s" % (self.pci_addr, cgu_path))
self.cgu_path = cgu_path
return
else:
LOG.error("Could not find cgu path for PCI address %s" % self.pci_addr)
raise FileNotFoundError
def read_cgu(self):
# Read a given cgu path and return the output in a parseable structure
cgu_output = None
if os.path.exists(self.cgu_path):
with open(self.cgu_path, 'r') as infile:
cgu_output = infile.read()
self.cgu_output_raw = cgu_output
return
def cgu_output_to_dict(self):
# Take raw cgu output and parse it into a dict
cgu_output = self.cgu_output_raw.splitlines()
LOG.debug("CGU output: %s" % cgu_output)
cgu_dict = {'input': {},
'EEC DPLL': {
'Current reference': '',
'Status': ''
},
'PPS DPLL': {
'Current reference': '',
'Status': '',
'Phase offset': ''
}
}
for line in cgu_output[7:14]:
# Build a dict out of the 7 line table
dict_to_insert = {re.split(' +', line)[1]: {'state': re.split(' +', line)[4],
'priority': {'EEC': re.split(' +', line)[6],
'PPS': re.split(' +', line)[8]}
}
}
cgu_dict['input'].update(dict_to_insert)
# Add the DPLL data below the table
cgu_dict['EEC DPLL']['Current reference'] = re.split('[ \t]+', cgu_output[16])[3]
cgu_dict['EEC DPLL']['Status'] = re.split('[ \t]+', cgu_output[17])[2]
cgu_dict['PPS DPLL']['Current reference'] = re.split('[ \t]+', cgu_output[20])[3]
cgu_dict['PPS DPLL']['Status'] = re.split('[ \t]+', cgu_output[21])[2]
cgu_dict['PPS DPLL']['Phase offset'] = re.split('[ \t]+', cgu_output[22])[3]
self.cgu_output_parsed = cgu_dict
return


@@ -26,3 +26,13 @@ GM_IS_PRESENT = "true"
CLOCK_CLASS_VALUE1 = "6"
CLOCK_CLASS_VALUE2 = "7"
CLOCK_CLASS_VALUE3 = "135"
# ts2phc constants
NMEA_SERIALPORT = "ts2phc.nmea_serialport"
GNSS_PIN = "GNSS-1PPS"
GNSS_LOCKED_HO_ACK = 'locked_ho_ack'
GNSS_DPLL_0 = "DPLL0"
GNSS_DPLL_1 = "DPLL1"
# testing values
CGU_PATH_VALID = "/sys/kernel/debug/ice/0000:18:00.0/cgu"


@@ -0,0 +1,69 @@
#
# Copyright (c) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import logging
from pygtail import Pygtail
from typing import List
from abc import ABC, abstractmethod
from trackingfunctionsdk.common.helpers import log_helper
from trackingfunctionsdk.common.helpers.gnss_monitor import Observer
LOG = logging.getLogger(__name__)
log_helper.config_logger(LOG)
class DmesgSubject(ABC):
@abstractmethod
def attach(self, observer: Observer) -> None:
pass
@abstractmethod
def detach(self, observer: Observer) -> None:
pass
@abstractmethod
def notify(self) -> None:
pass
class DmesgWatcher(DmesgSubject, ABC):
_observers: List[Observer] = []
_checklist = []
_matched_line = ""
def __init__(self, dmesg_log_file="/logs/kern.log"):
self.dmesg_log_file = dmesg_log_file
def parse_dmesg_event(self, dmesg_entry) -> None:
for observer in self._observers:
if observer.dmesg_values_to_check['pin'] in dmesg_entry \
and observer.dmesg_values_to_check['pci_addr'] in dmesg_entry:
matched_line = dmesg_entry
self.notify(observer, matched_line)
def run_watcher(self) -> None:
"""
This is intended to be run as a separate thread to follow the log file for events.
There is currently no support in the NIC device drivers for udev events that
would avoid polling/monitoring.
"""
while True:
for line in Pygtail(self.dmesg_log_file, offset_file="./kern.offset"):
self.parse_dmesg_event(line)
def attach(self, observer: Observer) -> None:
LOG.info("DmesgWatcher: Attached an observer.")
self._observers.append(observer)
def notify(self, observer, matched_line) -> None:
LOG.info("DmesgWatcher: Notifying observers.")
observer.update(self, matched_line)
def detach(self, observer: Observer) -> None:
self._observers.remove(observer)
LOG.debug("Removed an observer.")


@@ -0,0 +1,76 @@
#
# Copyright (c) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import logging
from abc import ABC, abstractmethod
from datetime import datetime
from trackingfunctionsdk.common.helpers import log_helper
from trackingfunctionsdk.common.helpers.cgu_handler import CguHandler
LOG = logging.getLogger(__name__)
log_helper.config_logger(LOG)
class Observer(ABC):
@abstractmethod
def update(self, subject, matched_line) -> None:
"""
Receive update from subject.
"""
pass
class GnssMonitor(Observer):
gnss_eec_state = ""
gnss_pps_state = ""
event_time = None
gnss_cgu_handler = None
def __init__(self, config_file, nmea_serialport=None, pci_addr=None, cgu_path=None):
self.config_file = config_file
# Setup GNSS data
self.gnss_cgu_handler = CguHandler(config_file, nmea_serialport, pci_addr, cgu_path)
if self.gnss_cgu_handler.nmea_serialport is None:
self.gnss_cgu_handler.get_gnss_nmea_serialport_from_ts2phc_config()
if self.gnss_cgu_handler.pci_addr is None:
self.gnss_cgu_handler.convert_nmea_serialport_to_pci_addr()
if self.gnss_cgu_handler.cgu_path is None:
self.gnss_cgu_handler.get_cgu_path_from_pci_addr()
self.gnss_cgu_handler.read_cgu()
self.gnss_cgu_handler.cgu_output_to_dict()
self.dmesg_values_to_check = {'pin': 'GNSS-1PPS', 'pci_addr': self.gnss_cgu_handler.pci_addr}
# Initialize status
if self.gnss_cgu_handler.cgu_output_parsed['EEC DPLL']['Current reference'] == 'GNSS-1PPS':
self.gnss_eec_state = self.gnss_cgu_handler.cgu_output_parsed['EEC DPLL']['Status']
if self.gnss_cgu_handler.cgu_output_parsed['PPS DPLL']['Current reference'] == 'GNSS-1PPS':
self.gnss_pps_state = self.gnss_cgu_handler.cgu_output_parsed['PPS DPLL']['Status']
self.event_time = datetime.now().timestamp()
def update(self, subject, matched_line) -> None:
LOG.info("Kernel event detected. %s" % matched_line)
LOG.debug("GnssMonitor handler logic would run now")
self.set_gnss_status()
def set_gnss_status(self):
self.event_time = datetime.now().timestamp()
self.gnss_cgu_handler.read_cgu()
self.gnss_cgu_handler.cgu_output_to_dict()
self.gnss_eec_state = self.gnss_cgu_handler.cgu_output_parsed['EEC DPLL']['Status']
self.gnss_pps_state = self.gnss_cgu_handler.cgu_output_parsed['PPS DPLL']['Status']
LOG.debug("GNSS EEC Status is: %s" % self.gnss_eec_state)
LOG.debug("GNSS PPS Status is: %s" % self.gnss_pps_state)
def __publish_gnss_status(self, force=False):
LOG.debug("Publish GNSS status.")
# TODO implement a publisher class to handle this
pass


@@ -0,0 +1,9 @@
from wsme import types as wtypes
EnumGnssState = wtypes.Enum(str, 'Locked', 'Freerun', 'Holdover')
class GnssState(object):
Locked = "Locked"
Freerun = "Freerun"
Holdover = "Holdover"


@@ -7,3 +7,4 @@ EnumResourceType = wtypes.Enum(str, 'PTP', 'FPGA')
class ResourceType(object):
TypePTP = "PTP"
TypeFPGA = "FPGA"
TypeGNSS = "GNSS"


@@ -1,218 +1,235 @@
#
# Copyright (c) 2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import os
import json
import time
import oslo_messaging
from oslo_config import cfg
import logging
import multiprocessing as mp
import threading
from trackingfunctionsdk.common.helpers import rpc_helper
from trackingfunctionsdk.model.dto.rpc_endpoint import RpcEndpointInfo
from trackingfunctionsdk.model.dto.resourcetype import ResourceType
from trackingfunctionsdk.model.dto.ptpstate import PtpState
from trackingfunctionsdk.client.ptpeventproducer import PtpEventProducer
from trackingfunctionsdk.common.helpers import ptpsync as ptpsync
LOG = logging.getLogger(__name__)
from trackingfunctionsdk.common.helpers import log_helper
log_helper.config_logger(LOG)
THIS_NODE_NAME = os.environ.get("THIS_NODE_NAME",'controller-0')
'''Entry point of Default Process Worker'''
def ProcessWorkerDefault(event, sqlalchemy_conf_json, broker_transport_endpoint):
worker = PtpWatcherDefault(event, sqlalchemy_conf_json, broker_transport_endpoint)
worker.run()
return
class PtpWatcherDefault:
DEFAULT_PTPTRACKER_CONTEXT = {
'holdover_seconds': 30,
'poll_freq_seconds': 2
}
class PtpRequestHandlerDefault(object):
def __init__(self, watcher):
self.watcher = watcher
self.init_time = time.time()
def query_status(self, **rpc_kwargs):
self.watcher.ptptracker_context_lock.acquire()
sync_state = self.watcher.ptptracker_context.get('sync_state', PtpState.Freerun)
last_event_time = self.watcher.ptptracker_context.get('last_event_time', time.time())
self.watcher.ptptracker_context_lock.release()
lastStatus = {
'ResourceType': ResourceType.TypePTP,
'EventData': {
'State': sync_state
},
'ResourceQualifier': {
'NodeName': self.watcher.node_name
},
'EventTimestamp': last_event_time
}
return lastStatus
def trigger_delivery(self, **rpc_kwargs):
self.watcher.forced_publishing = True
self.watcher.signal_ptp_event()
pass
def __init__(self, event, sqlalchemy_conf_json, daemon_context_json):
self.sqlalchemy_conf = json.loads(sqlalchemy_conf_json)
self.event = event
self.init_time = time.time()
self.daemon_context = json.loads(daemon_context_json)
self.ptptracker_context = self.daemon_context.get(
'ptptracker_context', PtpWatcherDefault.DEFAULT_PTPTRACKER_CONTEXT)
self.ptptracker_context['sync_state'] = PtpState.Freerun
self.ptptracker_context['last_event_time'] = self.init_time
self.ptptracker_context_lock = threading.Lock()
self.ptp_device_simulated = "true" == self.ptptracker_context.get('device_simulated', "False").lower()
self.event_timeout = float(self.ptptracker_context['poll_freq_seconds'])
self.node_name = self.daemon_context['THIS_NODE_NAME']
self.namespace = self.daemon_context.get('THIS_NAMESPACE', 'notification')
broker_transport_endpoint = self.daemon_context['NOTIFICATION_TRANSPORT_ENDPOINT']
registration_transport_endpoint = self.daemon_context['REGISTRATION_TRANSPORT_ENDPOINT']
self.broker_endpoint = RpcEndpointInfo(broker_transport_endpoint)
self.registration_broker_endpoint = RpcEndpointInfo(registration_transport_endpoint)
self.ptpeventproducer = PtpEventProducer(
self.node_name,
self.broker_endpoint.TransportEndpoint,
self.registration_broker_endpoint.TransportEndpoint)
self.__ptprequest_handler = PtpWatcherDefault.PtpRequestHandlerDefault(self)
self.forced_publishing = False
def signal_ptp_event(self):
if self.event:
self.event.set()
else:
LOG.warning("Unable to assert ptp event")
pass
def run(self):
# start location listener
self.__start_listener()
while True:
# annouce the location
forced = self.forced_publishing
self.forced_publishing = False
self.__publish_ptpstatus(forced)
if self.event.wait(self.event_timeout):
LOG.debug("daemon control event is asserted")
self.event.clear()
else:
LOG.debug("daemon control event is timeout")
pass
continue
self.__stop_listener()
'''Start listener to answer querying from clients'''
def __start_listener(self):
LOG.debug("start listener to answer location querying")
self.ptpeventproducer.start_status_listener(
self.__ptprequest_handler
)
return
def __stop_listener(self):
LOG.debug("stop listener to answer location querying")
self.ptpeventproducer.stop_status_listener(self.location_info)
return
def __get_ptp_status(self, holdover_time, freq, sync_state, last_event_time):
new_event = False
new_event_time = last_event_time
if self.ptp_device_simulated:
now = time.time()
timediff = now - last_event_time
if timediff > holdover_time:
new_event = True
new_event_time = now
if sync_state == PtpState.Freerun:
sync_state = PtpState.Locked
elif sync_state == PtpState.Locked:
sync_state = PtpState.Holdover
elif sync_state == PtpState.Holdover:
sync_state = PtpState.Freerun
else:
sync_state = PtpState.Freerun
else:
new_event, sync_state, new_event_time = ptpsync.ptp_status(
holdover_time, freq, sync_state, last_event_time)
return new_event, sync_state, new_event_time
'''announce location'''
def __publish_ptpstatus(self, forced=False):
holdover_time = float(self.ptptracker_context['holdover_seconds'])
freq = float(self.ptptracker_context['poll_freq_seconds'])
sync_state = self.ptptracker_context.get('sync_state', 'Unknown')
last_event_time = self.ptptracker_context.get('last_event_time', time.time())
new_event, sync_state, new_event_time = self.__get_ptp_status(
holdover_time, freq, sync_state, last_event_time)
if new_event or forced:
# update context
self.ptptracker_context_lock.acquire()
self.ptptracker_context['sync_state'] = sync_state
self.ptptracker_context['last_event_time'] = new_event_time
self.ptptracker_context_lock.release()
# publish new event
LOG.debug("publish ptp status to clients")
lastStatus = {
'ResourceType': 'PTP',
'EventData': {
'State': sync_state
},
'ResourceQualifier': {
'NodeName': self.node_name
},
'EventTimestamp': new_event_time
}
self.ptpeventproducer.publish_status(lastStatus)
return
class DaemonControl(object):
def __init__(self, sqlalchemy_conf_json, daemon_context_json, process_worker = None):
self.event = mp.Event()
self.daemon_context = json.loads(daemon_context_json)
self.node_name = self.daemon_context['THIS_NODE_NAME']
if not process_worker:
process_worker = ProcessWorkerDefault
self. sqlalchemy_conf_json = sqlalchemy_conf_json
self.daemon_context_json = daemon_context_json
self.process_worker = process_worker
return
def refresh(self):
self.process_worker(self.event, self.sqlalchemy_conf_json, self.daemon_context_json)
self.event.set()
#
# Copyright (c) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import os
import json
import time
import oslo_messaging
from oslo_config import cfg
import logging
import multiprocessing as mp
import threading
from trackingfunctionsdk.client.ptpeventproducer import PtpEventProducer
from trackingfunctionsdk.common.helpers import ptpsync
from trackingfunctionsdk.common.helpers import log_helper
from trackingfunctionsdk.common.helpers.dmesg_watcher import DmesgWatcher
from trackingfunctionsdk.common.helpers.gnss_monitor import GnssMonitor
from trackingfunctionsdk.model.dto.ptpstate import PtpState
from trackingfunctionsdk.model.dto.resourcetype import ResourceType
from trackingfunctionsdk.model.dto.rpc_endpoint import RpcEndpointInfo
from trackingfunctionsdk.model.dto.resourcetype import ResourceType
from trackingfunctionsdk.model.dto.ptpstate import PtpState
from trackingfunctionsdk.client.ptpeventproducer import PtpEventProducer
from trackingfunctionsdk.common.helpers import ptpsync as ptpsync
LOG = logging.getLogger(__name__)
from trackingfunctionsdk.common.helpers import log_helper
log_helper.config_logger(LOG)
THIS_NODE_NAME = os.environ.get("THIS_NODE_NAME",'controller-0')
'''Entry point of Default Process Worker'''
def ProcessWorkerDefault(event, sqlalchemy_conf_json, broker_transport_endpoint):
worker = PtpWatcherDefault(event, sqlalchemy_conf_json, broker_transport_endpoint)
worker.run()
return
class PtpWatcherDefault:
DEFAULT_PTPTRACKER_CONTEXT = {
'holdover_seconds': 30,
'poll_freq_seconds': 2
}
class PtpRequestHandlerDefault(object):
def __init__(self, watcher):
self.watcher = watcher
self.init_time = time.time()
def query_status(self, **rpc_kwargs):
self.watcher.ptptracker_context_lock.acquire()
sync_state = self.watcher.ptptracker_context.get('sync_state', PtpState.Freerun)
last_event_time = self.watcher.ptptracker_context.get('last_event_time', time.time())
self.watcher.ptptracker_context_lock.release()
lastStatus = {
'ResourceType': ResourceType.TypePTP,
'EventData': {
'State': sync_state
},
'ResourceQualifier': {
'NodeName': self.watcher.node_name
},
'EventTimestamp': last_event_time
}
return lastStatus
def trigger_delivery(self, **rpc_kwargs):
self.watcher.forced_publishing = True
self.watcher.signal_ptp_event()
pass
def __init__(self, event, sqlalchemy_conf_json, daemon_context_json):
self.sqlalchemy_conf = json.loads(sqlalchemy_conf_json)
self.event = event
self.init_time = time.time()
self.daemon_context = json.loads(daemon_context_json)
self.ptptracker_context = self.daemon_context.get(
'ptptracker_context', PtpWatcherDefault.DEFAULT_PTPTRACKER_CONTEXT)
self.ptptracker_context['sync_state'] = PtpState.Freerun
self.ptptracker_context['last_event_time'] = self.init_time
self.ptptracker_context_lock = threading.Lock()
self.ptp_device_simulated = "true" == self.ptptracker_context.get('device_simulated',
"False").lower()
self.event_timeout = float(self.ptptracker_context['poll_freq_seconds'])
self.node_name = self.daemon_context['THIS_NODE_NAME']
self.namespace = self.daemon_context.get('THIS_NAMESPACE', 'notification')
broker_transport_endpoint = self.daemon_context['NOTIFICATION_TRANSPORT_ENDPOINT']
registration_transport_endpoint = self.daemon_context['REGISTRATION_TRANSPORT_ENDPOINT']
self.broker_endpoint = RpcEndpointInfo(broker_transport_endpoint)
self.registration_broker_endpoint = RpcEndpointInfo(registration_transport_endpoint)
self.ptpeventproducer = PtpEventProducer(
self.node_name,
self.broker_endpoint.TransportEndpoint,
self.registration_broker_endpoint.TransportEndpoint)
self.__ptprequest_handler = PtpWatcherDefault.PtpRequestHandlerDefault(self)
self.forced_publishing = False
self.watcher = DmesgWatcher()
observer_list = [GnssMonitor(i) for i in self.daemon_context['GNSS_CONFIGS']]
for observer in observer_list:
self.watcher.attach(observer)
self.watcher_thread = threading.Thread(target=self.watcher.run_watcher)
def signal_ptp_event(self):
if self.event:
self.event.set()
else:
LOG.warning("Unable to assert ptp event")
pass
def run(self):
# start location listener
self.__start_listener()
# Start dmesg watcher
self.watcher_thread.start()
while True:
# announce the location
forced = self.forced_publishing
self.forced_publishing = False
self.__publish_ptpstatus(forced)
if self.event.wait(self.event_timeout):
LOG.debug("daemon control event is asserted")
self.event.clear()
else:
LOG.debug("daemon control event is timeout")
pass
continue
self.__stop_listener()
'''Start listener to answer querying from clients'''
def __start_listener(self):
LOG.debug("start listener to answer location querying")
self.ptpeventproducer.start_status_listener(
self.__ptprequest_handler
)
return
def __stop_listener(self):
LOG.debug("stop listener to answer location querying")
self.ptpeventproducer.stop_status_listener(self.location_info)
return
def __get_ptp_status(self, holdover_time, freq, sync_state, last_event_time):
new_event = False
new_event_time = last_event_time
if self.ptp_device_simulated:
now = time.time()
timediff = now - last_event_time
if timediff > holdover_time:
new_event = True
new_event_time = now
if sync_state == PtpState.Freerun:
sync_state = PtpState.Locked
elif sync_state == PtpState.Locked:
sync_state = PtpState.Holdover
elif sync_state == PtpState.Holdover:
sync_state = PtpState.Freerun
else:
sync_state = PtpState.Freerun
else:
new_event, sync_state, new_event_time = ptpsync.ptp_status(
holdover_time, freq, sync_state, last_event_time)
return new_event, sync_state, new_event_time
'''announce location'''
def __publish_ptpstatus(self, forced=False):
holdover_time = float(self.ptptracker_context['holdover_seconds'])
freq = float(self.ptptracker_context['poll_freq_seconds'])
sync_state = self.ptptracker_context.get('sync_state', 'Unknown')
last_event_time = self.ptptracker_context.get('last_event_time', time.time())
new_event, sync_state, new_event_time = self.__get_ptp_status(
holdover_time, freq, sync_state, last_event_time)
if new_event or forced:
# update context
self.ptptracker_context_lock.acquire()
self.ptptracker_context['sync_state'] = sync_state
self.ptptracker_context['last_event_time'] = new_event_time
self.ptptracker_context_lock.release()
# publish new event
LOG.debug("publish ptp status to clients")
lastStatus = {
'ResourceType': 'PTP',
'EventData': {
'State': sync_state
},
'ResourceQualifier': {
'NodeName': self.node_name
},
'EventTimestamp': new_event_time
}
self.ptpeventproducer.publish_status(lastStatus)
return
class DaemonControl(object):
def __init__(self, sqlalchemy_conf_json, daemon_context_json, process_worker = None):
self.event = mp.Event()
self.daemon_context = json.loads(daemon_context_json)
self.node_name = self.daemon_context['THIS_NODE_NAME']
if not process_worker:
process_worker = ProcessWorkerDefault
self.sqlalchemy_conf_json = sqlalchemy_conf_json
self.daemon_context_json = daemon_context_json
self.process_worker = process_worker
return
def refresh(self):
self.process_worker(self.event, self.sqlalchemy_conf_json, self.daemon_context_json)
self.event.set()


@@ -0,0 +1,77 @@
#
# Copyright (c) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import unittest
import mock
from trackingfunctionsdk.common.helpers.cgu_handler import CguHandler
import os
testpath = os.environ.get("TESTPATH", "")
class CguHandlerTests(unittest.TestCase):
testCguHandler = CguHandler(testpath + "test_input_files/ts2phc_valid.conf")
missingCguHandler = CguHandler("./no_such_file.conf")
invalidCguHandler = CguHandler(testpath + "test_input_files/ts2phc_invalid.conf")
def test_get_gnss_nmea_serialport(self):
# Test success path
self.testCguHandler.get_gnss_nmea_serialport_from_ts2phc_config()
self.assertEqual(self.testCguHandler.nmea_serialport, "/dev/ttyGNSS_1800_0")
# Test missing / incorrect config file path
with self.assertRaises(FileNotFoundError):
self.missingCguHandler.get_gnss_nmea_serialport_from_ts2phc_config()
# Test missing nmea_serialport in config
self.invalidCguHandler.get_gnss_nmea_serialport_from_ts2phc_config()
self.assertEqual(self.invalidCguHandler.nmea_serialport,
None)
def test_convert_nmea_serialport_to_pci_addr(self):
# Test success path
self.testCguHandler.get_gnss_nmea_serialport_from_ts2phc_config()
self.testCguHandler.convert_nmea_serialport_to_pci_addr(testpath +
"test_input_files/mock_dmesg")
self.assertEqual(self.testCguHandler.pci_addr, "0000:18:00.0")
# Test pci address not found
self.testCguHandler.nmea_serialport = "/dev/ttyGNSS_not_present"
self.testCguHandler.convert_nmea_serialport_to_pci_addr(testpath +
"test_input_files/mock_dmesg")
self.assertEqual(self.testCguHandler.pci_addr, None)
@mock.patch('trackingfunctionsdk.common.helpers.cgu_handler.os.path')
def test_get_cgu_path_from_pci_addr(self, mock_path):
# Setup mock
mock_path.exists.return_value = True
self.testCguHandler.get_gnss_nmea_serialport_from_ts2phc_config()
self.testCguHandler.convert_nmea_serialport_to_pci_addr(testpath +
"test_input_files/mock_dmesg")
self.testCguHandler.get_cgu_path_from_pci_addr()
self.assertEqual(self.testCguHandler.cgu_path, "/ice/0000:18:00.0/cgu")
mock_path.exists.return_value = False
with self.assertRaises(FileNotFoundError):
self.testCguHandler.get_cgu_path_from_pci_addr()
def test_cgu_output_to_dict(self):
reference_dict = {
'input':
{'CVL-SDP22': {'state': 'invalid', 'priority': {'EEC': '8', 'PPS': '8'}},
'CVL-SDP20': {'state': 'invalid', 'priority': {'EEC': '15', 'PPS': '3'}},
'C827_0-RCLKA': {'state': 'invalid', 'priority': {'EEC': '4', 'PPS': '4'}},
'C827_0-RCLKB': {'state': 'invalid', 'priority': {'EEC': '5', 'PPS': '5'}},
'SMA1': {'state': 'invalid', 'priority': {'EEC': '1', 'PPS': '1'}},
'SMA2/U.FL2': {'state': 'invalid', 'priority': {'EEC': '2', 'PPS': '2'}},
'GNSS-1PPS': {'state': 'valid', 'priority': {'EEC': '0', 'PPS': '0'}}},
'EEC DPLL': {'Current reference': 'GNSS-1PPS', 'Status': 'locked_ho_ack'},
'PPS DPLL': {'Current reference': 'GNSS-1PPS', 'Status': 'locked_ho_ack',
'Phase offset': '295'}}
self.testCguHandler.cgu_path = testpath + "test_input_files/mock_cgu_output"
self.testCguHandler.read_cgu()
self.testCguHandler.cgu_output_to_dict()
self.assertDictEqual(self.testCguHandler.cgu_output_parsed, reference_dict)


@@ -0,0 +1,56 @@
#
# Copyright (c) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import unittest
import os
from unittest.mock import MagicMock
from trackingfunctionsdk.common.helpers.dmesg_watcher import DmesgWatcher
from trackingfunctionsdk.common.helpers.gnss_monitor import GnssMonitor
testpath = os.environ.get("TESTPATH", "")
class DmesgWatcherTests(unittest.TestCase):
testDmesgWatcher = DmesgWatcher()
observer_a = GnssMonitor(testpath + "./test_input_files/ts2phc_valid.conf",
"tty_GNSS_1800_0", "0000:18:00.0",
testpath + "./test_input_files/mock_cgu_output")
observer_b = GnssMonitor(testpath + "./test_input_files/ts2phc_valid.conf",
"tty_GNSS_1a00_0", "0000:1a:00.0",
testpath + "./test_input_files/mock_cgu_output")
def test_parse_dmesg_event(self):
self.testDmesgWatcher.attach(self.observer_a)
self.testDmesgWatcher.notify = MagicMock()
with open(testpath + "./test_input_files/mock_kern.log", 'r') as dmesg:
for line in dmesg:
self.testDmesgWatcher.parse_dmesg_event(line)
assert self.testDmesgWatcher.notify.called
# Test that notify is not called when there is no match
self.testDmesgWatcher.notify.reset_mock()
self.testDmesgWatcher.attach(self.observer_b)
with open(testpath + "./test_input_files/mock_kern.log", 'r') as dmesg:
for line in dmesg:
self.testDmesgWatcher.parse_dmesg_event(line)
self.testDmesgWatcher.notify.assert_not_called()
def test_attach_detach(self):
self.testDmesgWatcher.attach(self.observer_a)
self.testDmesgWatcher.attach(self.observer_b)
self.assertEqual(len(self.testDmesgWatcher._observers), 2)
self.testDmesgWatcher.detach(self.observer_a)
self.testDmesgWatcher.detach(self.observer_b)
self.assertEqual(len(self.testDmesgWatcher._observers), 0)
def test_notify(self):
self.observer_a.update = MagicMock()
self.testDmesgWatcher.notify(observer=self.observer_a,
matched_line="2022-06-03T19:50:05.959 controller-0 kernel: warning [ "
"4.635511] ice 0000:18:00.0: <DPLL1> state changed to: "
"locked_ho_ack, pin GNSS-1PPS")
assert self.observer_a.update.called


@@ -0,0 +1,23 @@
Found ZL80032 CGU
DPLL Config ver: 1.3.0.1
CGU Input status:
| | priority |
input (idx) | state | EEC (0) | PPS (1) |
---------------------------------------------------
CVL-SDP22 (0) | invalid | 8 | 8 |
CVL-SDP20 (1) | invalid | 15 | 3 |
C827_0-RCLKA (2) | invalid | 4 | 4 |
C827_0-RCLKB (3) | invalid | 5 | 5 |
SMA1 (4) | invalid | 1 | 1 |
SMA2/U.FL2 (5) | invalid | 2 | 2 |
GNSS-1PPS (6) | valid | 0 | 0 |
EEC DPLL:
Current reference: GNSS-1PPS
Status: locked_ho_ack
PPS DPLL:
Current reference: GNSS-1PPS
Status: locked_ho_ack
Phase offset: 295


@@ -0,0 +1,6 @@
[ 4.834255] ice 0000:18:00.0: ttyGNSS_1800_0 registered
[ 4.834258] ice 0000:18:00.0: GNSS TTY init successful
[ 4.846647] ice 0000:18:00.0: <DPLL0> state changed to: locked_ho_ack, pin GNSS-1PPS
[ 4.859247] ice 0000:18:00.0: <DPLL1> state changed to: locked_ho_ack, pin GNSS-1PPS
[ 8.535739] ice 0000:1a:00.0: ttyGNSS_1a00_0 registered
[ 8.535741] ice 0000:1a:00.0: GNSS TTY init successful


@@ -0,0 +1,12 @@
2022-06-03T19:36:08.021 controller-0 kernel: info [ 11.122139] ice 0000:18:00.0: ttyGNSS_1800_0 registered
2022-06-03T19:36:08.021 controller-0 kernel: info [ 11.122142] ice 0000:18:00.0: GNSS TTY init successful
2022-06-03T19:36:08.021 controller-0 kernel: warning [ 11.134103] ice 0000:18:00.0: <DPLL0> state changed to: locked_ho_ack, pin GNSS-1PPS
2022-06-03T19:36:08.021 controller-0 kernel: warning [ 11.148040] ice 0000:18:00.0: <DPLL1> state changed to: locked_ho_ack, pin GNSS-1PPS
2022-06-03T19:36:08.022 controller-0 kernel: info [ 14.405736] ice 0000:1a:00.0: ttyGNSS_1a00_0 registered
2022-06-03T19:36:08.022 controller-0 kernel: info [ 14.405737] ice 0000:1a:00.0: GNSS TTY init successful
2022-06-03T19:50:05.958 controller-0 kernel: info [ 4.609339] ice 0000:18:00.0: ttyGNSS_1800_0 registered
2022-06-03T19:50:05.958 controller-0 kernel: info [ 4.609362] ice 0000:18:00.0: GNSS TTY init successful
2022-06-03T19:50:05.958 controller-0 kernel: warning [ 4.621518] ice 0000:18:00.0: <DPLL0> state changed to: locked_ho_ack, pin GNSS-1PPS
2022-06-03T19:50:05.959 controller-0 kernel: warning [ 4.635511] ice 0000:18:00.0: <DPLL1> state changed to: locked_ho_ack, pin GNSS-1PPS
2022-06-03T19:50:05.959 controller-0 kernel: info [ 7.995939] ice 0000:1a:00.0: ttyGNSS_1a00_0 registered
2022-06-03T19:50:05.959 controller-0 kernel: info [ 7.995941] ice 0000:1a:00.0: GNSS TTY init successful


@@ -0,0 +1,24 @@
[global]
##
## Default Data Set
##
leapfile /usr/share/zoneinfo/leap-seconds.list
logging_level 7
ts2phc.pulsewidth 100000000
[ens1f0]
##
## Associated interface: oam0
##
ts2phc.extts_polarity rising
[ens2f0]
##
## Associated interface: data0
##
ts2phc.extts_polarity rising


@@ -0,0 +1,25 @@
[global]
##
## Default Data Set
##
leapfile /usr/share/zoneinfo/leap-seconds.list
logging_level 7
ts2phc.nmea_serialport /dev/ttyGNSS_1800_0
ts2phc.pulsewidth 100000000
[ens1f0]
##
## Associated interface: oam0
##
ts2phc.extts_polarity rising
[ens2f0]
##
## Associated interface: data0
##
ts2phc.extts_polarity rising


@@ -1 +1,10 @@
# Nothing
sqlalchemy~=1.4.36
wsme
oslo.messaging
pecan~=1.4.1
WebTest
WebOb~=1.8.7
requests~=2.27.1
mock
PyYAML
pygtail~=0.12.0


@@ -1,3 +1,4 @@
# hacking pulls in flake8
hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
bashate >= 0.2
unittest2

tox.ini

@@ -1,5 +1,5 @@
[tox]
envlist = linters
envlist = linters,py36
minversion = 2.3
skipsdist = True
sitepackages=False
@@ -18,6 +18,16 @@ deps =
whitelist_externals =
bash
[testenv:py36]
basepython = python3.6
setenv =
TESTPATH=./notificationservice-base/centos/docker/ptptrackingfunction/trackingfunctionsdk/tests/
commands =
unit2 discover -s ./notificationservice-base/centos/docker/ptptrackingfunction
deps =
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
[testenv:bashate]
# Treat all E* codes as Errors rather than warnings using: -e 'E*'
commands =