From b71f87545d00fe1341022817e5e8f99a84332676 Mon Sep 17 00:00:00 2001 From: deoren Date: Sat, 9 Jun 2018 22:30:35 -0500 Subject: [PATCH] Replace print calls with logging module More work to do later, but the current state relies upon the standard library logging module and related config settings to control output. Future work will likely result in related logging settings being moved to an external config file to make it easier to modify specific log settings. refs WhyAskWhy/automated-tickets#21 --- automated_tickets.ini | 19 +-- automated_tickets.py | 192 +++++++++++++++--------- automated_tickets_lib.py | 308 +++++++++++++++++---------------------- references.md | 5 + 4 files changed, 275 insertions(+), 249 deletions(-) diff --git a/automated_tickets.ini b/automated_tickets.ini index 8875b21..d04cb2a 100644 --- a/automated_tickets.ini +++ b/automated_tickets.ini @@ -15,31 +15,28 @@ ############################################################ # Disables sending notifications and any other possibly destructive action -testing_mode = true +testing_mode = false -# Set to true for verbose output as the script executes. Useful for troubleshooting. -display_debug_messages = true +# Set to true for verbose output to the current console/terminal as the script +# executes. Useful for troubleshooting. +display_console_debug_messages = false # Less verbose output. Enabling this provides status details. -display_info_messages = true +display_console_info_messages = false # Output warning messages which indicate that a desired step was not # completed as expected, but the issue does not appear to be severe # enough to stop the script from completing in an "acceptable" manner. -# See the 'fail_on_warnings' option in case you would rather not -# continue operation when warning conditions are encountered -# TODO: Add support for this -display_warning_messages = true +display_console_warning_messages = false # Output error messages which block proper operation of the script -# TODO: Add support for this -display_error_messages = true +display_console_error_messages = false # If enabled, this treats all warnings as failures and exits immediately with # one or more messages indicating that this option is enabled and what warning # condition occured # TODO: Add support for this -fail_on_warnings = false +# fail_on_warnings = false # If enabled, events/tasks that are flagged as an intern_task are processed # like all other matching events. If false, then those events are filtered out diff --git a/automated_tickets.py b/automated_tickets.py index 3620d9f..b10347a 100644 --- a/automated_tickets.py +++ b/automated_tickets.py @@ -17,42 +17,72 @@ # parse command line arguments, 'sys.argv' import argparse - +import logging +import logging.handlers import os import os.path import sys +app_name = 'automated-tickets' +# TODO: Configure formatter to log function/class info +syslog_formatter = logging.Formatter('%(name)s - %(levelname)s - %(funcName)s - %(message)s') +file_formatter = logging.Formatter('%(asctime)s - %(name)s - %(funcName)s - %(levelname)s - %(message)s') +stdout_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(funcName)s - %(message)s') -########################################################################## -# This controls output display prior to user configurable values -# being read in and honored. 
-# Uncomment and disable/enable as needed
-# for debugging purposes
-#automated_tickets_lib.DISPLAY_DEBUG_MESSAGES = True
-#automated_tickets_lib.DISPLAY_INFO_MESSAGES = True
-#automated_tickets_lib.DISPLAY_WARNING_MESSAGES = True
-#automated_tickets_lib.DISPLAY_ERROR_MESSAGES = True
+# Grab root logger and set initial logging level
+root_logger = logging.getLogger()
+root_logger.setLevel(logging.INFO)
+
+# The SysLogHandler class supports sending logging messages to a remote
+# or local Unix syslog.
+# TODO: Expose this value elsewhere; move to logging_config.json?
+syslog_socket = '/dev/log'
+try:
+    syslog_handler = logging.handlers.SysLogHandler(address=syslog_socket)
+except AttributeError:
+    # We're likely running on Windows, so use the NullHandler here
+    syslog_handler = logging.NullHandler()
+else:
+    # Good thus far, finish configuring SysLogHandler
+    syslog_handler.ident = app_name + ": "
+    syslog_handler.setFormatter(syslog_formatter)
+    syslog_handler.setLevel(logging.INFO)
+
+console_handler = logging.StreamHandler(stream=sys.stdout)
+console_handler.setFormatter(stdout_formatter)
+# Apply lax logging level since we will use a filter to examine message levels
+# and compare against allowed levels set within the main config file. This
+# filter is added later once the settings config object has been constructed.
+console_handler.setLevel(logging.NOTSET)
+
+file_handler = logging.FileHandler(app_name + '.log', mode='a')
+file_handler.setFormatter(file_formatter)
+file_handler.setLevel(logging.DEBUG)
+
+# Create logger object that inherits from root and will be inherited by
+# all modules used by this project
+# Note: The console_handler is added later after the settings config object
+# has been constructed.
+app_logger = logging.getLogger(app_name)
+app_logger.addHandler(syslog_handler)
+app_logger.addHandler(file_handler)
+app_logger.setLevel(logging.DEBUG)
+
+log = app_logger.getChild(__name__)
+
+log.debug("Logging initialized for %s", __name__)
+
+
+# TODO: Setup support for reading in environment variable value
+# in place of hard-coding values here.
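On the TODO above about moving these settings into an external logging_config.json: logging.config.dictConfig() accepts exactly this kind of structure, so the file could be json.load()-ed and passed straight in. The sketch below is not part of the patch; it mirrors the formats, levels, and log file name used above, leaves console handling to the script (since that depends on the parsed INI flags), and assumes a local /dev/log socket is present.

```python
import logging
import logging.config

LOGGING_CONFIG = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'syslog': {
            'format': '%(name)s - %(levelname)s - %(funcName)s - %(message)s',
        },
        'file': {
            'format': '%(asctime)s - %(name)s - %(funcName)s - %(levelname)s - %(message)s',
        },
    },
    'handlers': {
        'syslog': {
            'class': 'logging.handlers.SysLogHandler',
            'address': '/dev/log',
            'level': 'INFO',
            'formatter': 'syslog',
        },
        'file': {
            'class': 'logging.FileHandler',
            'filename': 'automated-tickets.log',
            'mode': 'a',
            'level': 'DEBUG',
            'formatter': 'file',
        },
    },
    'loggers': {
        'automated-tickets': {
            'handlers': ['syslog', 'file'],
            'level': 'DEBUG',
        },
    },
}

logging.config.dictConfig(LOGGING_CONFIG)
logging.getLogger('automated-tickets').info("Logging configured via dictConfig")
```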
# -# Basic import so that we can manipulate global variables -import automated_tickets_lib +import automated_tickets_lib as atlib ########################################################################## - -# Our custom classes -from automated_tickets_lib import Settings - -# Consants -from automated_tickets_lib import DATE_LABEL - -# Our custom "print" fuctions -from automated_tickets_lib import print_debug, print_error, print_info, print_warning - -from automated_tickets_lib import get_events, get_wiki_page_contents, send_notification - -from automated_tickets_lib import get_include_calls, get_included_wiki_pages - +log.debug("Finished importing standard modules and our custom library modules.") ######################################################## # Collect command-line arguments passed by Cron @@ -69,7 +99,7 @@ required=True, # Reuse keys from DATE_LABEL dict in library file instead of repeating here - choices=list(DATE_LABEL.keys()) + choices=list(atlib.DATE_LABEL.keys()) ) # NOTE: Probably want to leave this as not required and fall back to checking @@ -78,16 +108,18 @@ parser.add_argument('--config_file', action='store', required=False) try: + log.info('Parsing commandline options') args = parser.parse_args() -except Exception as e: - print_error("Unable to parse command-line arguments: {}".format(e), "argparse") +except Exception as error: + log.exception("Unable to parse command-line arguments: %s", error) sys.exit(1) # NOTE: The command-line options parser enforces specific values and requires # that at least one of those values is present. event_schedule = args.event_schedule -if args.config_file: +# TODO: Confirm 'None' is correct fallback value +if args.config_file is not None: global_config_file = args.config_file else: # FIXME: @@ -132,39 +164,35 @@ config_file_candidates = [config_file['global'], config_file['local']] # Generate configuration setting options -print_debug( - "Passing in these config file locations for evalution: {}".format( - config_file_candidates), "CONFIG") - -settings = Settings(config_file_candidates) - -# Controls status messages for each minor step of the process -# (e.g., pulling data, writing data, what flags are enabled, etc.) -DISPLAY_DEBUG_MESSAGES = settings.flags['display_debug_messages'] -automated_tickets_lib.DISPLAY_DEBUG_MESSAGES = DISPLAY_DEBUG_MESSAGES - -# Controls status messages for each major block -# (e.g., finishing table updates, reporting rows affected, etc.) -DISPLAY_INFO_MESSAGES = settings.flags['display_info_messages'] -automated_tickets_lib.DISPLAY_INFO_MESSAGES = DISPLAY_INFO_MESSAGES - -DISPLAY_WARNING_MESSAGES = settings.flags['display_warning_messages'] -automated_tickets_lib.DISPLAY_WARNING_MESSAGES = DISPLAY_WARNING_MESSAGES - -DISPLAY_ERROR_MESSAGES = settings.flags['display_error_messages'] -automated_tickets_lib.DISPLAY_ERROR_MESSAGES = DISPLAY_ERROR_MESSAGES +log.debug( + "Passing in these config file locations for evalution: %s", + config_file_candidates) +log.info('Parsing config files') +settings = atlib.Settings(config_file_candidates) +# Now that the settings object has been properly created, lets use it to +# finish configuring console logging for the main application logger. 
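The filter attached just below is ConsoleFilterFunc, defined later in this patch in automated_tickets_lib.py; it gates console output on the new display_console_* booleans from the INI file. Here is a condensed, self-contained sketch of the same idea, with a level-to-flag table standing in for the chain of if tests and with made-up flag values (the class and variable names in the sketch are illustrative, the flag names are the real INI options):

```python
import logging

class ConsoleFilter(logging.Filter):
    """Allow only the record levels enabled by the display_console_* flags."""

    LEVEL_FLAGS = {
        'DEBUG': 'display_console_debug_messages',
        'INFO': 'display_console_info_messages',
        'WARNING': 'display_console_warning_messages',
        'ERROR': 'display_console_error_messages',
    }

    def __init__(self, flags):
        super().__init__()
        self.flags = flags

    def filter(self, record):
        flag = self.LEVEL_FLAGS.get(record.levelname)
        # Levels without an enabled flag are dropped.
        return bool(flag and self.flags.get(flag, False))

# Example: the INI enables warnings and errors only.
flags = {'display_console_warning_messages': True,
         'display_console_error_messages': True}

console = logging.StreamHandler()
console.addFilter(ConsoleFilter(flags))

demo = logging.getLogger('automated-tickets.demo')
demo.addHandler(console)
demo.setLevel(logging.DEBUG)

demo.debug("suppressed by the filter")
demo.warning("shown on the console")
```

Because the console handler's level is left at NOTSET, the filter alone decides which of the records the logger passes along actually reach the terminal.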
+console_handler.addFilter(atlib.ConsoleFilterFunc(settings=settings)) +app_logger.addHandler(console_handler) # Troubleshooting config file flag boolean conversion for key, value in list(settings.flags.items()): - print_debug("key: '{}' value: '{}' type of value: '{}'".format(key, value, type(value)), "MySQL") + log.debug("key: '%s' value: '%s' type of value: '%s'", + key, + value, + type(value)) +if settings.flags['testing_mode']: + log.warning("Test warning message to prove that the INI flag works") + log.error("Test error message to prove that the INI flag works") # Generate list of matching events from database based on requested event # schedule (daily, weekly, etc.) events = [] -events = get_events(settings, event_schedule) + +log.info('Retrieving events') +events = atlib.get_events(settings, event_schedule) message = {} @@ -174,9 +202,7 @@ # validity checks can be grouped together. This can simply be a reference # to those pre-computed values. # FIXME: Reintroduce support for multiple destination email addresses - message['envelope'] = "From: {}\nTo: {}\nSubject: {} ({})\n".format( - event.email_from_address, event.email_to_address, - + message['subject'] = "{} ({})".format( # Formatting the prefix string before then using the result in # the larger format string we're building here # NOTE: Explicitly lowering the case of the dictionary key values @@ -184,14 +210,21 @@ # the associated value. # FIXME: Add try/except blocks around expansion so that any KeyError # exceptions can be caught and processing can continue. - event.email_subject_prefix.format(DATE_LABEL[event.event_schedule].lower()), + event.email_subject_prefix.format(atlib.DATE_LABEL[event.event_schedule].lower()), event.event_schedule.lower() + ) + message['envelope'] = "From: {}\nTo: {}\nSubject: {}\n".format( + event.email_from_address, event.email_to_address, message['subject'] ) - print_debug(message['envelope'], "Email envelope details") + log.debug("Email envelope details: %s", message['envelope']) # If this task has a known due date ... if event.redmine_new_issue_due_date: + log.debug("Due date of %s is set for task: %s", + event.redmine_new_issue_due_date, + message['subject']) + message['footer'] = "\nProject: {}\nCategory: {}\nStatus: {}\nPriority: {}\nDue date: {}\n".format( event.redmine_new_issue_project, event.redmine_new_issue_category, @@ -200,6 +233,7 @@ event.redmine_new_issue_due_date ) else: + log.debug("No due date set for %s", message['envelope']) message['footer'] = "\nProject: {}\nCategory: {}\nStatus: {}\nPriority: {}\n".format( event.redmine_new_issue_project, event.redmine_new_issue_category, @@ -207,8 +241,12 @@ event.redmine_new_issue_priority ) + log.debug("Fetching initial wiki page contents for page %s from project %s", + event.redmine_wiki_page_name, + event.redmine_wiki_page_project_shortname) + # Get the raw contents of the wiki page associated with the event - wiki_page_contents = get_wiki_page_contents( + wiki_page_contents = atlib.get_wiki_page_contents( settings, event.redmine_wiki_page_name, event.redmine_wiki_page_project_shortname, @@ -220,20 +258,29 @@ # (dependency free) page is used as the body of the message if settings.flags['expand_include_macros_in_wiki_pages']: + log.debug("Enabled: Expand include macros found in wiki pages") # Check wiki_page_contents for include macro calls and build a # list of included pages to fetch the content from. 
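Since the include-expansion loop in the hunk below is the least obvious part of the script, here is a compact sketch of what it does, using canned page text in place of the database lookups. The include pattern is the one get_include_calls() uses (shown later in this patch); the capturing variant used to pull out the page name is an assumption, since included_page_pattern itself is not visible in these hunks, and the project name, page names, and content are made up.

```python
import re

# Made-up wiki content standing in for the database lookups.
project = 'docs'
pages = {
    'Weekly Tasks': "Rotate backups.\n{{include(docs:Shared Steps)}}\n",
    'Shared Steps': "1. Check disk space\n2. Review logs\n",
}

# Pattern the library uses to find include macro calls ...
include_macro_pattern = r'{{include\(%s:[a-zA-Z0-9 _\-\']+\)}}' % project
# ... and an assumed capturing variant to extract the page name.
included_page_pattern = r'{{include\(%s:([a-zA-Z0-9 _\-\']+)\)}}' % project

wiki_page_contents = pages['Weekly Tasks']
calls = re.findall(include_macro_pattern, wiki_page_contents)

while calls:
    for call in calls:
        page_name = re.search(included_page_pattern, call).group(1)
        # Substitute the macro call with the included page's content.
        wiki_page_contents = wiki_page_contents.replace(call, pages[page_name])
    # Included pages can themselves contain include calls, so look again.
    calls = re.findall(include_macro_pattern, wiki_page_contents)

print(wiki_page_contents)
```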
- wiki_page_macro_calls = get_include_calls( + wiki_page_macro_calls = [] + wiki_page_macro_calls = atlib.get_include_calls( wiki_page_contents, event.redmine_wiki_page_project_shortname ) + log.debug("Include calls found: %s", bool(wiki_page_macro_calls)) + + # TODO: used in a strictly boolean context; from PEP8: + # For sequences, (strings, lists, tuples), use the fact that + # empty sequences are false while wiki_page_macro_calls: - print_debug(wiki_page_macro_calls) + log.debug("Wiki page include macro calls found: %s", + wiki_page_macro_calls) # proceed with getting the list of included pages + log.debug("Fetching included wiki pages ...") included_wiki_pages = [] - included_wiki_pages = get_included_wiki_pages( + included_wiki_pages = atlib.get_included_wiki_pages( wiki_page_macro_calls, event.redmine_wiki_page_project_shortname) @@ -241,7 +288,12 @@ while included_wiki_pages: wiki_page_to_process = included_wiki_pages.pop() - included_wiki_page_contents = get_wiki_page_contents( + + log.debug("Fetching wiki page contents for included page %s from project %s", + wiki_page_to_process, + event.redmine_wiki_page_project_shortname) + + included_wiki_page_contents = atlib.get_wiki_page_contents( settings, wiki_page_to_process, event.redmine_wiki_page_project_shortname, @@ -253,18 +305,21 @@ event.redmine_wiki_page_project_shortname, wiki_page_to_process) - print_debug(search_value, "Search string for include call") + log.debug("Search string for include call: %s", search_value) wiki_page_contents = wiki_page_contents.replace(search_value, included_wiki_page_contents) # After we have processed all initial include pages, check again to # see if pulling in the content from those included pages resulted # in us finding more include macro calls. - wiki_page_macro_calls = get_include_calls( + log.debug("Parsing wiki page contents for further include calls ...") + wiki_page_macro_calls = atlib.get_include_calls( wiki_page_contents, event.redmine_wiki_page_project_shortname ) - + else: + log.debug("Disabled: Expand include macros found in wiki pages") + log.debug("Redmine will substitute macros with live include page contents") # Use wiki page contents as the message body. This is either the fully # expanded content after include macro calls have been processed or the @@ -274,6 +329,7 @@ # FIXME: Revisit this? + log.debug("FIXME: Leaving header empty") message['header'] = "" # Note: @@ -289,5 +345,9 @@ ) # Send notification - send_notification(settings, event.email_from_address, event.email_to_address, email_message) + log.info('Sending email notification') + atlib.send_notification(settings, event.email_from_address, event.email_to_address, email_message) +# Informs the logging system to perform an orderly shutdown by flushing and +# closing all handlers. +logging.shutdown() diff --git a/automated_tickets_lib.py b/automated_tickets_lib.py index e75f2ae..81af1b0 100644 --- a/automated_tickets_lib.py +++ b/automated_tickets_lib.py @@ -4,6 +4,9 @@ Library module used by the automated_tickets.py script. Not intended for direct use. 
""" +# TODO: Consider adding a hook to each function to provide this value +# for debug output (troubleshooting) +# name_of_this_function = sys._getframe().f_code.co_name ######################################## @@ -12,11 +15,33 @@ import configparser import datetime +import logging +import logging.handlers import os import re import smtplib import sys + +if __name__ == "__main__": + sys.exit("This module is meant to be imported, not executed directly.") + +######################################## +# Modules - Library/config settings +######################################## + +app_name = 'automated-tickets' + +# Create module-level logger object that inherits from "app" logger settings +log = logging.getLogger(app_name).getChild(__name__) + +# TODO: How to mute messages from this library module by default? +# TODO: Set NullHandler +# log.addHandler(logging.NullHandler) + +log.debug("Logging initialized for %s", __name__) + + ######################################## # Modules - Third party ######################################## @@ -29,13 +54,10 @@ # # * sudo apt-get install mysql-connector-python # * pip install mysql-connector-python --user +log.debug("Attempting to import mysql.connector module") import mysql.connector as mysql - -if __name__ == "__main__": - sys.exit("This module is meant to be imported, not executed directly.") - ####################################################### # Variables, constants ####################################################### @@ -44,27 +66,6 @@ DATE = datetime.date.today() TODAY = DATE.strftime('%Y-%m-%d') -# Disable various modes by default. Will be overriden by main script that -# imports this module -DISPLAY_DEBUG_MESSAGES = False -DISPLAY_INFO_MESSAGES = False - -# Going to assume we want these by default, we can override in config file -DISPLAY_WARNING_MESSAGES = True -DISPLAY_ERROR_MESSAGES = True - -# Ref #4: Background coloring provided by ASCII escape sequences -BACKGROUND_COLORS = { - 'DEBUG': '\033[95m', - 'OKBLUE': '\033[94m', - 'OKGREEN': '\033[92m', - 'WARNING': '\033[93m', - 'FAIL': '\033[91m', - 'ENDC': '\033[0m', - 'BOLD': '\033[1m', - 'UNDERLINE': '\033[4m', -} - # TODO: Am I using these or SQL generated date values? date = datetime.date.today() @@ -113,7 +114,13 @@ class Event(object): def __init__(self, event): - print_debug("{}".format(event), "Event class, input tuple") + self.log = log.getChild(self.__class__.__name__) + + self.log.debug("%s class, input tuple: %s", __class__, event) + + #log.warning("Test warning message to prove that the INI flag works") + #log.error("Test error message to prove that the INI flag works") + # Expand out incoming tuple without hard-coding specific index values (self.email_to_address, @@ -129,7 +136,7 @@ def __init__(self, event): self.event_schedule ) = event - print_debug("new instance of object created", "Event class") + self.log.debug("%s: new instance of object created", __class__) class Settings(object): @@ -141,6 +148,8 @@ class Settings(object): def __init__(self, config_file_list): + self.log = log.getChild(self.__class__.__name__) + try: parser = configparser.SafeConfigParser() @@ -154,7 +163,7 @@ def __init__(self, config_file_list): # dataset. processed_files = parser.read(config_file_list) - print_debug("Config files processed: {}".format(processed_files), "CONFIG") + self.log.debug("CONFIG: Config files processed: %s", processed_files) # FIXME: We can either pass a verified list of files to the parser # OR we can verify the number of processed files is 1 or greater. 
@@ -166,11 +175,9 @@ def __init__(self, config_file_list): # to be too confusing. raise configparser.ParsingError(config_file_list) - except configparser.ParsingError as err: - - error_message = "Unable to parse config file: {}".format(err) - print_error(error_message, "CONFIG") - sys.exit() + except configparser.ParsingError as error: + self.log.exception("Unable to parse config file: %s", error) + sys.exit(1) # Begin building object by creating dictionary member attributes @@ -210,22 +217,45 @@ def __init__(self, config_file_list): for key in self.flags: self.flags[key] = parser.getboolean('flags', key) - print_debug("{} has a value of {} and a type of {}".format( + self.log.debug("%s has a value of %s and a type of %s", key, self.flags[key], - type(self.flags[key])), "CONFIG") - - except configparser.NoSectionError as err: - - error_message = "{}: Unable to parse config file: {}".format("CONFIG", err) - print_error(error_message) - sys.exit(error_message) - + type(self.flags[key])) + + except configparser.NoSectionError as error: + + self.log.exception("Unable to parse config file: %s", error) + sys.exit(1) + +# Honor boolean flags set within main script config file and only +# output specific log levels to the console. +class ConsoleFilterFunc(logging.Filter): + def __init__(self, settings): + self.settings = settings + #print("Just proving that this function is being called") + def filter(self, record): + if self.settings.flags['display_console_error_messages'] and record.levelname == 'ERROR': + #print("Error messages enabled") + return True + if self.settings.flags['display_console_warning_messages'] and record.levelname == 'WARNING': + #print("Warning messages enabled") + return True + if self.settings.flags['display_console_info_messages'] and record.levelname == 'INFO': + #print("Info messages enabled") + return True + if self.settings.flags['display_console_debug_messages'] and record.levelname == 'DEBUG': + #print("Debug messages enabled") + return True + else: + #print("No matches") + return False ####################################################### # Functions ####################################################### +# TODO: Merge this function since we probably do not need a separate function +# for this. 
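Two idioms this patch standardizes on are worth a small illustration: handing the logging call its arguments lazily ('%s' plus args) rather than pre-rendering with str.format(), and calling log.exception() inside except blocks so the traceback is recorded along with the message. A throwaway example (the messages echo ones used in this patch; the file names are placeholders):

```python
import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger('automated-tickets.demo')

config_file_candidates = ['example-a.ini', 'example-b.ini']

# Lazy interpolation: the string is only rendered if a handler emits it.
log.debug("Config files processed: %s", config_file_candidates)

try:
    raise ValueError("bad value in [flags] section")
except ValueError as error:
    # Same message either way, but log.exception() also appends the
    # current traceback and always logs at ERROR level.
    log.exception("Unable to parse config file: %s", error)
```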
def open_db_connection(settings, database): """ @@ -239,22 +269,19 @@ def open_db_connection(settings, database): try: - print_debug("""MySQL connection details: - - user: {} - password: {} - host: {} - port: {} - database: {} - raise_on_warnings: {} - raise_on_warnings (type): {} - """.format(settings.mysqldb_config['user'], - settings.mysqldb_config['password'], - settings.mysqldb_config['host'], - settings.mysqldb_config['port'], - database, - settings.mysqldb_config['raise_on_warnings'], - type(settings.mysqldb_config['raise_on_warnings'])), "MySQL") + log.debug("DB User: %s", settings.mysqldb_config["user"]) + log.debug("DB Name: %s", database) + log.debug("DB Host Name/IP: %s", settings.mysqldb_config["host"]) + log.debug("DB Host Port: %s", settings.mysqldb_config["port"]) + log.debug("MySQL - raise_on_warnings: %s", + settings.mysqldb_config["raise_on_warnings"]) + log.debug("MySQL - raise_on_warnings type: %s", + type(settings.mysqldb_config["raise_on_warnings"])) + + log.info("Connecting to %s database on %s at port %s", + database, + settings.mysqldb_config["host"], + settings.mysqldb_config["port"]) mysql_connection = mysql.connect( user=settings.mysqldb_config['user'], @@ -266,10 +293,8 @@ def open_db_connection(settings, database): ) except mysql.Error as error: - error_message = "Unable to connect to database: {}".format(error) - print_error(error_message, "MySQL") - sys.exit("MySQL: {}".format(error_message)) - + log.exception("Unable to connect to database: %s", error) + sys.exit(1) return mysql_connection @@ -284,6 +309,8 @@ def get_wiki_page_contents(settings, wiki_page_name, wiki_page_project, wiki_pag # Create cursor object so that we can interact with the database #################################################################### + # TODO: Where is the cursor/connection closed/released? + log.info('Opening connection to database') mysql_connection = open_db_connection(settings, wiki_page_database) # Cursor for the MySQL copy of the database @@ -300,23 +327,29 @@ def get_wiki_page_contents(settings, wiki_page_name, wiki_page_project, wiki_pag query = settings.queries['wiki_page_contents'].format( wiki_page_name, wiki_page_project) - print_debug(query, "Wiki page retrieval query") + log.debug("Wiki page retrieval query: %s", query) + log.info('Executing query') mysql_cursor.execute(query) - except Exception as e: - print_error("Unable to execute wiki page retrieval query: {} ".format(e), "MySQL") - sys.exit() + except Exception as error: + log.exception("Unable to execute wiki page retrieval query: %s", error) + sys.exit(1) try: # Grab first element of returned tuple, ignore everything else wiki_page_content = mysql_cursor.fetchone()[0] - except Exception as e: - + except Exception as error: # FIXME: Is there a Plan B for wiki page lookup failures? - print_error("Unable to retrieve wiki page content: {} ".format(e), "MySQL") - sys.exit() + log.exception("Unable to retrieve wiki page content: %s", error) + sys.exit(1) + finally: + log.debug("Closing cursor ...") + mysql_cursor.close() + + log.debug("Closing connection ...") + mysql_connection.close() if wiki_page_content is not None: @@ -326,9 +359,14 @@ def get_wiki_page_contents(settings, wiki_page_name, wiki_page_project, wiki_pag else: - return "Unable to retrieve content from {}:{}".format( + # TODO: Is this an acceptable outcome? Should this be a hard error? + # The assumption here is that while one entry may be bad, we wish + # to continue processing the others and just note the problem. 
+ error_message = "Unable to retrieve content from {}:{}".format( wiki_page_project, wiki_page_name ) + log.warning(error_message) + return error_message def get_include_calls(wiki_page_contents, wiki_page_project): @@ -348,10 +386,13 @@ def get_include_calls(wiki_page_contents, wiki_page_project): # TODO: Consider moving this to external config file for easy maintenace include_macro_pattern = r'{{include\(%s:[a-zA-Z0-9 _\-\']+\)}}' % (wiki_page_project) + # TODO: Do we properly handle zero results? Yes, the while loop which + # calls this function checks for an empty list to know when to stop + # looping. wiki_page_macro_calls = [] wiki_page_macro_calls = re.findall(include_macro_pattern, wiki_page_contents) - print_debug(wiki_page_macro_calls, "List of include macro calls") + log.debug("List of include macro calls: %s", wiki_page_macro_calls) return wiki_page_macro_calls @@ -378,7 +419,7 @@ def get_included_wiki_pages(wiki_page_macro_calls, wiki_page_project): for match in wiki_page_macro_calls: included_page = re.search(included_page_pattern, match) - if included_page: + if included_page is not None: # Append the first parenthesized subgroup of the match # The equivalent value appears to be 'included_page.groups()[0]' wiki_page_names.append(included_page.group(1)) @@ -387,10 +428,11 @@ def get_included_wiki_pages(wiki_page_macro_calls, wiki_page_project): # macro calls in the primary wiki page. If this situation # occurred then there is a bug somewhere and we need to know # about it. - print_error('No matches found', 'wiki_page_names search') - sys.exit() + # TODO: Raise exception? + log.error('No matching wiki page names found') + sys.exit(1) - print_debug("{}".format(wiki_page_names), "List of included wiki pages") + log.debug("List of included wiki pages: %s", wiki_page_names) return wiki_page_names @@ -406,6 +448,7 @@ def get_events(settings, event_schedule): # Create cursor object so that we can interact with the database #################################################################### + log.info('Opening connection to database') mysql_connection = open_db_connection(settings, settings.mysqldb_config['events_database']) # Cursor for the MySQL copy of the database @@ -433,12 +476,13 @@ def get_events(settings, event_schedule): query = base_query try: + log.info("Executing query") mysql_cursor.execute(query) - except Exception as e: - print_error("Unable to query event_reminders table: {} ".format(e), "MySQL") + except Exception as error: + log.exception("Unable to query event_reminders table: %s", error) - print_debug("Pulling data from {} MySQL table ...".format('events'), "MySQL") + log.debug("Pulling data from %s MySQL table ...", 'events') events = [] for event in mysql_cursor.fetchall(): @@ -459,8 +503,15 @@ def get_events(settings, event_schedule): # Cleanup #################################################################### + # FIXME: Can this be handled with a context manager since there are several + # places where an exception may occur and cause the connecton to be + # uncleanly closed? 
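On the FIXME above: contextlib.closing() gives the connection and cursor deterministic cleanup even when a query raises, which would remove the explicit close() calls scattered through these functions. The sketch below is self-contained with stand-in credentials, database name, and a trivial query; it also shows driver-side parameter binding as an alternative to the str.format() query templates used in this module.

```python
from contextlib import closing

import mysql.connector as mysql

# Stand-in connection settings; the real values come from
# settings.mysqldb_config, parsed out of automated_tickets.ini.
config = {'user': 'events_ro', 'password': 'example',
          'host': '127.0.0.1', 'port': 3306, 'database': 'events'}

rows = []
# closing() calls .close() on the way out, exception or not.
with closing(mysql.connect(**config)) as connection:
    with closing(connection.cursor()) as cursor:
        # Let the driver bind values (%s) instead of building the query
        # with str.format() as the settings.queries templates do.
        cursor.execute("SELECT %s + %s", (20, 22))
        rows = cursor.fetchall()

print(rows)  # [(42,)]
```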
+ + log.debug("Closing cursor ...") + mysql_cursor.close() + # Close database connections - print_debug("Closing database connection ...", "MySQL") + log.debug("Closing database connection ...") mysql_connection.close() return events @@ -477,97 +528,6 @@ def file_can_be_modified(full_path_to_file): return bool(os.access(full_path_to_file, os.W_OK)) -# FIXME: Consider tossing this function since I'm not using it -def get_full_file_path(local_dir, global_dir, file_name): - """ - Returns the full path to an external resource. If the file can be found - locally (same directory as this script) that full path will be returned, - otherwise the full path will be built from the "normal" location - """ - - local_file = os.path.join(local_dir, file_name) - - global_file = os.path.join(global_dir, file_name) - - # TODO: Consider adding a hook to each function to provide this value - # for debug output (troubleshooting) - name_of_this_function = sys._getframe().f_code.co_name - - print_debug("local file is {}".format(local_file), name_of_this_function) - print_debug("global file is {}".format(global_file), name_of_this_function) - - # Attempt to reference local file - if file_exists(local_file): - return local_file - - elif file_exists(global_file): - return global_file - - else: - # FIXME: Raise exception instead? - error_message = "[!] Unable to verify access for '{}' at '{}' or '{}'. Exiting ..." - sys.exit(error_message.format(file_name, local_file, global_file)) - - -def print_debug(message, prefix=""): - """Prints message if the DISPLAY_DEBUG_MESSAGES flag is set""" - - if DISPLAY_DEBUG_MESSAGES: - - if len(prefix) > 0: - prefix = "{}:".format(prefix) - - print("{}[d] {} {}{}".format( - BACKGROUND_COLORS['DEBUG'], - prefix, - # Explicitly convert passed message to string for display - str(message), - BACKGROUND_COLORS['ENDC'])) - -def print_info(message, prefix=""): - """Prints message if the DISPLAY_INFO_MESSAGES flag is set""" - - if DISPLAY_INFO_MESSAGES: - - if len(prefix) > 0: - prefix = "{}:".format(prefix) - - print("{}[i] {} {}{}".format( - BACKGROUND_COLORS['BOLD'], - prefix, - # Explicitly convert passed message to string for display - str(message), - BACKGROUND_COLORS['ENDC'])) - -def print_warning(message, prefix=""): - """Prints warning message to console""" - - if DISPLAY_WARNING_MESSAGES: - - if len(prefix) > 0: - prefix = "{}:".format(prefix) - - print("{}[w] {} {}{}".format( - BACKGROUND_COLORS['WARNING'], - prefix, - # Explicitly convert passed message to string for display - str(message), - BACKGROUND_COLORS['ENDC'])) - -def print_error(message, prefix=""): - """Prints warning message to console""" - - if DISPLAY_ERROR_MESSAGES: - - if len(prefix) > 0: - prefix = "{}:".format(prefix) - - print("{}[!] {} {}{}".format( - BACKGROUND_COLORS['FAIL'], - prefix, - # Explicitly convert passed message to string for display - str(message), - BACKGROUND_COLORS['ENDC'])) # FIXME: Add the from_address and to_address values onto the message object def send_notification(settings, from_address, to_address, message): @@ -577,13 +537,17 @@ def send_notification(settings, from_address, to_address, message): of notifications. For now, only email notifications are supported. 
""" - print_debug(message, "Notification") + log.debug("Notification: %s", message) email_server = settings.notification_servers['email_server_ip_or_fqdn'] email_debug_filename = 'email.txt' if settings.flags['testing_mode']: + # TODO: With the automated-tickets-dev environment setting up a test + # mail to HTTP submission environment, should the default for test + # mode still be to write to a log file? + # # Fill in details from this run at the end of the file. It is up to # the caller to prune the old file if they wish to have the new # results go to a clean file. diff --git a/references.md b/references.md index 3db5664..e3a8ef7 100644 --- a/references.md +++ b/references.md @@ -26,7 +26,12 @@ - https://dev.mysql.com/doc/connector-python/en/ - https://pypi.org/project/mysql-connector-python/ +### Logging +- https://docs.python.org/3/library/logging.handlers.html +- https://docs.python.org/3/library/logging.html +- https://stackoverflow.com/questions/3968669/how-to-configure-logging-to-syslog-in-python +- [Writing to syslog with Python's logging library](https://gist.github.com/sweenzor/1782457) ### SQLite