diff --git a/README.rst b/README.rst
index 2518d63..e0f866e 100644
--- a/README.rst
+++ b/README.rst
@@ -113,21 +113,24 @@ With this solution, log workflow looks like:
   |                | <---------------------------- |                 |
   +----------------+        2. Fetch data          +-----------------+
           |
-          +--------------+
-                         |
-                         |
-   3. Download logs;     |
-      include inventory.yaml  |
-      and build info     v
-  +----------------+
-  |                |
-  |   Logsender    |
-  |                |
-  +----------------+
+          +------------------------------------------------+
+                                                            |
+   3. Download logs;                                        |
+      include inventory.yaml                                |
+      and build info                                        |
+                                                            |
+                                                            V
+  +----------------+                                +----------------+
+  |                |                                |                |
+  |   Logsender    |  <------------                 |  Download dir  |
+  |                |                                |                |
+  +----------------+                                +----------------+
           |  4. Parse log files;
           |     add required fields;
           |
-             send to Opensearch                     v
+             send to Opensearch                     |
+                                                    |
+                                                    v
   +-----------------+
   |                 |
   |   Opensearch    |
diff --git a/logscraper/logscraper.py b/logscraper/logscraper.py
index 2902fac..8c05126 100755
--- a/logscraper/logscraper.py
+++ b/logscraper/logscraper.py
@@ -113,7 +113,7 @@ def get_arguments():
     parser.add_argument("--job-name", help="CI job name(s). Parameter can be "
                         "set multiple times. If not set it would scrape "
                         "every latest builds.", action='append')
-    parser.add_argument("--gearman-server", help="Gearman host addresss")
+    parser.add_argument("--gearman-server", help="Gearman host address")
     parser.add_argument("--gearman-port", help="Gearman listen port.")
     parser.add_argument("--follow", help="Keep polling zuul builds",
                         type=bool, default=True)
@@ -151,7 +151,7 @@ def get_config_args(config_path):
 
 def parse_args(app_args, config_args):
     if not config_args:
-        logging.warning("Can not get informations from config files")
+        logging.warning("Can not get information from config files")
 
     # NOTE: When insecure flag is set as an argument, the value is False,
     # so if insecure is set to True in config file, it should also be False.
@@ -452,7 +452,7 @@ def load_config(config_path):
     except FileNotFoundError:
         logging.critical("Can not find provided config file! %s" % config_path)
     except Exception as e:
-        logging.critical("Exception occured on reading config file %s" % e)
+        logging.critical("Exception occurred on reading config file %s" % e)
 
 
 def get_files_to_check(config):
@@ -571,7 +571,7 @@ def setup_logging(debug):
 
 
 def run_build(build):
-    """Submit job informations into log processing system.
+    """Submit job information into log processing system.
 
     If CI job result is different than 'SUSSESS' or 'FAILURE' and
     download argument is set, it will create special file: 'custom-job-results.txt'
@@ -599,7 +599,7 @@ def run_build(build):
     except PermissionError:
         logging.critical("Can not create directory %s" % directory)
     except Exception as e:
-        logging.critical("Exception occured %s on creating dir %s" % (
+        logging.critical("Exception occurred %s on creating dir %s" % (
             e, directory))
 
     if is_job_with_result(build):
@@ -618,7 +618,7 @@ def run_build(build):
             save_build_info(directory, build)
     else:
         # NOTE: As it was earlier, logs that contains status other than
-        # "SUCCESS" or "FAILUE" will be parsed by Gearman service.
+        # "SUCCESS" or "FAILURE" will be parsed by Gearman service.
         logging.debug("Parsing content for gearman service")
         results = dict(files=[], jobs=[], invocation={})
         files = check_specified_files(build, args.insecure)
@@ -660,7 +660,7 @@ def run_scraping(args, zuul_api_url, job_name=None):
             args.max_skipped, config.build_cache, job_name):
         logging.debug("Working on build %s" % build['uuid'])
 
-        # add missing informations
+        # add missing information
         build["tenant"] = config.tenant
         build["build_args"] = args
         build["config_file"] = config.config_file
@@ -711,7 +711,7 @@ def main():
     setup_logging(args.debug)
     if args.download and args.gearman_server and args.gearman_port:
         logging.critical("Can not use logscraper to send logs to gearman "
-                         "and dowload logs. Choose one")
+                         "and download logs. Choose one")
         sys.exit(1)
     while True:
         run(args)
diff --git a/logscraper/logsender.py b/logscraper/logsender.py
index f1ccc72..0c73d0f 100755
--- a/logscraper/logsender.py
+++ b/logscraper/logsender.py
@@ -91,7 +91,7 @@ def get_arguments():
 def _is_file_not_empty(file_path):
     """Return True when buildinfo file is not empty"""
     # NOTE: we can assume, that when file exists, all
-    # content have been dowloaded to the directory.
+    # content have been downloaded to the directory.
     return os.path.getsize(file_path) > 0
 
 
@@ -139,7 +139,7 @@ def get_ready_directories(directory):
             log_files[build_uuid] = files
         else:
             logging.info("Skipping build with uuid %s. Probably all files "
-                         "are not dowloaded yet." % build_uuid)
+                         "are not downloaded yet." % build_uuid)
             continue
     return log_files
 
@@ -314,7 +314,7 @@ def send_to_es(build_file, es_fields, es_client, index, chunk_size, doc_type,
     except opensearch_exceptions.TransportError as e:
         logging.critical("Can not send message to Opensearch. Error: %s" % e)
     except Exception as e:
-        logging.critical("An error occured on sending message to "
+        logging.critical("An error occurred on sending message to "
                          "Opensearch %s" % e)
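
For reference, step 4 of the updated README diagram (Logsender reads files from the download dir, adds the required fields, and sends documents to Opensearch) can be approximated with a short opensearch-py sketch. This is only an illustrative example under assumed values, not the project's logsender.py: the host, index name, build fields, and file path below are placeholders.

# Illustrative sketch only -- shows the shape of README step 4: read one
# downloaded log file, merge in build fields, and bulk-index the lines into
# Opensearch. Host, index, fields, and path are assumed placeholder values.
from opensearchpy import OpenSearch, helpers


def iter_actions(log_path, build_fields, index):
    """Yield one bulk action per log line, tagged with the build fields."""
    with open(log_path) as log_file:
        for line in log_file:
            doc = dict(build_fields)
            doc["message"] = line.rstrip("\n")
            yield {"_index": index, "_source": doc}


client = OpenSearch(hosts=[{"host": "localhost", "port": 9200}])
build_fields = {"build_uuid": "abc123", "result": "SUCCESS"}  # normally read from the buildinfo file
helpers.bulk(client, iter_actions("/tmp/logscraper/abc123/job-output.txt",
                                  build_fields, "logstash-example"))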