diff --git a/installer/build.py b/installer/build.py
index 3cba60f..9e48fc1 100755
--- a/installer/build.py
+++ b/installer/build.py
@@ -1,7 +1,7 @@
 # $Id$
 # $HeadURL$
 
-# Purpose: 
+# Purpose:
 # Build installers for upload to hosting services.The goal is to use
 # this for development and release builds.
 
@@ -61,7 +61,7 @@ def main():
     # #####################
     # Project Files
     # #####################
-    # These need to stay as separate values from 
+    # These need to stay as separate values from
     # files_with_placeholder_content var due to use in build functions?
 
     CX_FREEZE_SETUP = EXPORT_PATH + os.sep + 'setup_freeze.py'
@@ -96,7 +96,7 @@ def main():
 def export_svn(url_or_wkco, EXPORT_PATH):
     """Exports clean files from SVN working copy"""
 
-    if INFO_ON: print '[INFO] Exporting files from %s' % url_or_wkco
+    if INFO_ON: print('[INFO] Exporting files from %s' % url_or_wkco)
 
     client = pysvn.Client()
     # http://pysvn.tigris.org/docs/pysvn_prog_ref.html#pysvn_client_export
@@ -116,16 +116,16 @@ def compile_python_code(python_setup_file):
     installation of Python"""
 
     if os.path.exists(PACKAGE_DIR):
-        if INFO_ON: 
-            print '[INFO] Compiled Python code exists, skipping compilation'
+        if INFO_ON:
+            print('[INFO] Compiled Python code exists, skipping compilation')
     else:
-        if INFO_ON: print '[INFO] Compiling Python code'
+        if INFO_ON: print('[INFO] Compiling Python code')
 
         # Using triple quotes to handle path with spaces
        compile_command = """python "%s" build """ % python_setup_file
         result = os.system(compile_command)
 
         if DEBUG_ON:
-            print "The result of the Python code compile is: %s" % result
+            print("The result of the Python code compile is: %s" % result)
 
 # FIXME: Function references several 'CONSTANTS' without them being passed.
@@ -136,7 +136,7 @@ def update_package_dir(PACKAGE_DIR):
 
     if not os.path.exists(PACKAGE_DIR):
         # Move compiled files to 'package' dir.
-        os.rename(EXPORT_PATH + os.sep + 'build\exe.win32-' \
+        os.rename(EXPORT_PATH + os.sep + r'build\exe.win32-' \
             + INSTALLED_PYTHON_VERSION, PACKAGE_DIR)
 
         os.rmdir(EXPORT_PATH + os.sep + 'build')
@@ -159,13 +159,13 @@ def update_version_tag_in_files(files, release_version):
     """Update placeholder version information within a list of files"""
 
     for file in files:
-        if INFO_ON: print "[INFO] Updating version tag in: %s" % file
+        if INFO_ON: print("[INFO] Updating version tag in: %s" % file)
 
         # Open tmp file, read in orig and make changes in tmp file.
         o = open("updated_file.tmp","a")
         for line in open(file):
             line = line.replace(APP_RELEASE_VER_PLACEHOLDER, release_version)
-            o.write(line) 
+            o.write(line)
         o.close()
 
         # Replace original with updated copy
@@ -181,7 +181,7 @@ def create_src_archive(src_dir, dst_dir, release_version):
     dest_file = dst_dir + os.sep + dest_file
 
-    if INFO_ON: print "Creating source archive of %s" % src_dir
+    if INFO_ON: print("Creating source archive of %s" % src_dir)
 
     # Max compression, Multi-threading on, Solid archive creation on
     archive_command = """%s -t7z -mx=9 -mmt=on -ms=on a "%s" "%s" """ % \
         (archive_app, dest_file, src_dir)
@@ -191,14 +191,14 @@ def create_binary_archive(src_dir, dst_dir, release_version):
     """Create archives for distribution"""
 
     os.chdir(src_dir)
-    
+
     archive_app = "7z.exe"
 
     dest_file = """%s-%s-win32-bin.7z""" % \
         (APPLICATION_NAME.lower(), release_version)
     dest_file = dst_dir + os.sep + dest_file
 
-    if INFO_ON: print "Creating binary archive of %s" % src_dir
+    if INFO_ON: print("Creating binary archive of %s" % src_dir)
 
     # Max compression, Multi-threading on, Solid archive creation on
     archive_command = """%s -t7z -mx=9 -mmt=on -ms=on a "%s" "%s" """ % \
         (archive_app, dest_file, src_dir)
@@ -208,7 +208,7 @@ def build_innosetup_installer(project_file, release_version, OUTPUT_DIR, \
     revision):
     """Produce an Inno Setup installer"""
 
-    if INFO_ON: print '[INFO] Compiling Inno Setup project'
+    if INFO_ON: print('[INFO] Compiling Inno Setup project')
 
     # Set iss_version to 1.0.SVNRevision
     # Note: This is the installer file version, NOT Synclosure version.
@@ -227,7 +227,7 @@ def build_innosetup_installer(project_file, release_version, OUTPUT_DIR, \
         """iscc /Q %s /O"%s" "%s" """ % \
         (iscc_cmd_line_vars, OUTPUT_DIR, project_file)
 
-    if DEBUG_ON: print compile_command
+    if DEBUG_ON: print(compile_command)
     os.system(compile_command)
 
 def get_base_name(file_name):
@@ -243,7 +243,7 @@ def build_wix_project(src_files, project_file, release_version, \
 
     os.chdir(src_files)
 
-    if INFO_ON: print '[INFO] Compiling WiX project'
+    if INFO_ON: print('[INFO] Compiling WiX project')
 
     # If this is a dev build, set WiX project version to 0.0.SVNRevision
     # Otherwise, set WiX project version to release_version
@@ -286,17 +286,17 @@ def build_wix_project(src_files, project_file, release_version, \
         project_files_dir, get_base_name(heat_file), src_files, \
         output_file_full_path, wix_extensions)
 
-    if DEBUG_ON: print "\nheat_command: %s" % heat_command
-    if DEBUG_ON: print "\ncandle_command: %s" % candle_command
-    if DEBUG_ON: print "\nlight_command: %s" % light_command
+    if DEBUG_ON: print("\nheat_command: %s" % heat_command)
+    if DEBUG_ON: print("\ncandle_command: %s" % candle_command)
+    if DEBUG_ON: print("\nlight_command: %s" % light_command)
 
-    if INFO_ON: print " * Calling heat ..."
+    if INFO_ON: print(" * Calling heat ...")
     os.system (heat_command)
 
-    if INFO_ON: print " * Calling candle ..."
+    if INFO_ON: print(" * Calling candle ...")
     os.system (candle_command)
 
-    if INFO_ON: print "\n * Calling light ..."
+    if INFO_ON: print("\n * Calling light ...")
     os.system (light_command)
 
 def cleanup_build_env(dirs_to_remove, BUILD_DIR, MAX_ATTEMPTS, \
@@ -306,15 +306,15 @@ def cleanup_build_env(dirs_to_remove, BUILD_DIR, MAX_ATTEMPTS, \
     if cleanup_attempts == MAX_ATTEMPTS:
         sys.exit("[ERROR] Problems cleaning build env: %s" % cleanup_error)
 
-    if INFO_ON: print "[INFO] Cleaning build directory"
+    if INFO_ON: print("[INFO] Cleaning build directory")
 
     os.chdir(BUILD_DIR)
     for dir in dirs_to_remove:
         if os.path.exists(dir):
-            if DEBUG_ON: print " * [DEBUG] Attempting to remove %s" % dir
+            if DEBUG_ON: print(" * [DEBUG] Attempting to remove %s" % dir)
             try:
                 shutil.rmtree(dir)
-            except Exception, cleanup_error:
+            except Exception as cleanup_error:
                 # If there are problems removing exported files, wait a few
                 # moments and try again until MAX_ATTEMPTS is reached.
                 time.sleep(3)
@@ -332,8 +332,8 @@ def cleanup_build_env(dirs_to_remove, BUILD_DIR, MAX_ATTEMPTS, \
     # Initial Setup
     #####################################
 
-    if INFO_ON: print "[INFO] Starting %s (%s) " % \
-        (os.path.basename(sys.argv[0]), DATE)
+    if INFO_ON: print("[INFO] Starting %s (%s) " % \
+        (os.path.basename(sys.argv[0]), DATE))
 
     cleanup_build_env(DIRS_TO_REMOVE_DURING_CLEANUP, BUILD_DIR, MAX_ATTEMPTS)
 
@@ -354,12 +354,12 @@ def cleanup_build_env(dirs_to_remove, BUILD_DIR, MAX_ATTEMPTS, \
     else:
         release_version = APPLICATION_RELEASE_VERSION
 
-    if DEBUG_ON: 
-        print "[DEBUG] release_version is %s" % release_version
+    if DEBUG_ON:
+        print("[DEBUG] release_version is %s" % release_version)
 
     if INFO_ON:
-        print '[INFO] Attempting to build %s %s' \
-            % (APPLICATION_NAME, release_version)
+        print('[INFO] Attempting to build %s %s' \
+            % (APPLICATION_NAME, release_version))
 
     update_version_tag_in_files(files_with_placeholder_content,
         release_version)
diff --git a/synclosure.py b/synclosure.py
index 039bd25..f7aa783 100755
--- a/synclosure.py
+++ b/synclosure.py
@@ -12,14 +12,14 @@
 import sys
 import os
 import os.path
 
-import urllib2
-import ConfigParser
+import urllib.request, urllib.error, urllib.parse
+import configparser
 
 # parse command line arguments, 'sys.argv'
 from optparse import OptionParser
 
 import re
-import httplib
+import http.client
 import time
 import socket
@@ -49,7 +49,7 @@ def main():
 
 
     # Create customized user agent
-    opener = urllib2.build_opener()
+    opener = urllib.request.build_opener()
     opener.addheaders = [('User-agent', __product__)]
 
 
@@ -107,11 +107,11 @@ def main():
 def ShowProductInfo():
     """Print out app name, version, copyright and license info"""
     # FIXME: Improve this
-    print "\n",__product__, "\n"
-    print '-' * 65
-    print __copyright__
-    print __license__
-    print '-' * 65, "\n"
+    print("\n",__product__, "\n")
+    print('-' * 65)
+    print(__copyright__)
+    print(__license__)
+    print('-' * 65, "\n")
 
 def WriteFile(filename, msg):
     """Wrapper to safely read/write content to source and cache files"""
@@ -125,8 +125,8 @@ def WriteFile(filename, msg):
         f.write(msg)
         f.close()
     except:
-        print nl+'[error] couldnt create/access/read file (' \
-            + filename + '), ' + 'check permissions.'+nl
+        print(nl+'[error] couldnt create/access/read file (' \
+            + filename + '), ' + 'check permissions.'+nl)
         return False
 
 #load a file into a list, ignore lines beginning with a '#'
@@ -154,57 +154,57 @@ def DownloadFile(url, downloadfolder, retrylimit, waittime):
     """Wrapper for urlopen to make use of retrylimit, waittime values"""
 
     # Filter out invalid content from author's XML feed
-    if debugmodeon: print "original url is %s" % url
+    if debugmodeon: print("original url is %s" % url)
     url = SanitizeName(url, urlfilter, 'url')
-    if debugmodeon: print "sanitized url is %s" % url
+    if debugmodeon: print("sanitized url is %s" % url)
 
     try:
         # Create a file handle for enclosure (after redirects). Use customized user agent.
         remotefile_fh = opener.open(url)
-    except urllib2.HTTPError, e:
+    except urllib.error.HTTPError as e:
        # FIXME: Loop through a dictionary and define both template output and action based on that?
         if e.code == 404:
-            print '*' * 60
-            print '[WARNING] NOT FOUND:', url
+            print('*' * 60)
+            print('[WARNING] NOT FOUND:', url)
             if ignorenotfound:
-                print '[NOTICE ] Adding url to cache'
+                print('[NOTICE ] Adding url to cache')
                 WriteFile(cache, url+'\n')
-                print '*' * 60, "\n\n"
+                print('*' * 60, "\n\n")
                 oldenclosures.append(url)
 
         elif e.code == 403:
-            print '*' * 60
-            print '[WARNING] ACCESS DENIED:', url
+            print('*' * 60)
+            print('[WARNING] ACCESS DENIED:', url)
             if ignorenotfound:
-                print '[NOTICE ] Adding url to cache'
+                print('[NOTICE ] Adding url to cache')
                 WriteFile(cache, url+'\n')
-                print '*' * 60, "\n"
+                print('*' * 60, "\n")
                 oldenclosures.append(url)
 
         else:
-            print "geturl HTTPError %s on url %s" % (e.code, url)
+            print("geturl HTTPError %s on url %s" % (e.code, url))
             pass
     # FIXME: Is this being handled? - May not be worth worrying about?
     # FIXME: This will need better handling
-    except urllib2.URLError, e:
-        print "geturl URLError %s on url %s" % (e.reason, url)
+    except urllib.error.URLError as e:
+        print("geturl URLError %s on url %s" % (e.reason, url))
 
     # Perhaps handle socket.error differently?
-    except (socket.timeout, socket.error, IOError, httplib.BadStatusLine, httplib.IncompleteRead), errdesc:
+    except (socket.timeout, socket.error, IOError, http.client.BadStatusLine, http.client.IncompleteRead) as errdesc:
         # Presumably the server have borked the connection for an unknown reason. Let's try again.
         if not retrylimit == 0:
             (dir, file) = os.path.split(url)
-            print '*' * 60
-            print "[WARNING] Failed to download %s to %s" % (file, dir)
-            print "\t Error Description: ", errdesc, "\n"
-            print "\tRetrying ..."
-            print '*' * 60
+            print('*' * 60)
+            print("[WARNING] Failed to download %s to %s" % (file, dir))
+            print("\t Error Description: ", errdesc, "\n")
+            print("\tRetrying ...")
+            print('*' * 60)
 
             time.sleep(waittime)
             retrylimit -=1
             DownloadFile(url, downloadfolder, retrylimit, waittime)
         else:
             # Give up on this file (for this session) and proceed to the next one
-            print "\t\tRetry limit exhausted, moving on to next file"
+            print("\t\tRetry limit exhausted, moving on to next file")
     else:
         # no problems encountered thus far
@@ -217,28 +217,28 @@ def DownloadFile(url, downloadfolder, retrylimit, waittime):
         enclosurefilename = os.path.join(downloadfolder, remotefile)
 
         if debugmodeon:
-            print "Original enclosure url:", url
-            print "Enc url after redirect:", finalurl
+            print("Original enclosure url:", url)
+            print("Enc url after redirect:", finalurl)
 
         localfilename = SanitizeName(enclosurefilename, filenamefilter, type='file')
 
         try:
             localfile_fh = open(localfilename, 'wb')
-        except IOError, errdesc:
+        except IOError as errdesc:
             # FIXME: This "if" section 'may' not be necessary if the 404 section catches all of the invalid links
             if len(remotefile) == 0:
                 # FIXME: Update comment - make sense?
                 # If the url listed in the enclosure was not to a file then add
                 # it to the cache so we will not try to download it again.
                 # ex: http://example.com/
 
-                print '\n', '*' * 60
-                print '[NOTICE ] INVALID LINK ENCOUNTERED'
-                print '*' * 60
+                print('\n', '*' * 60)
+                print('[NOTICE ] INVALID LINK ENCOUNTERED')
+                print('*' * 60)
 
                 if ignoreinvalidlinks:
-                    print '\tAdding: ', url, \
-                        '\n\tto cache to prevent future download attempts'
+                    print('\tAdding: ', url, \
+                        '\n\tto cache to prevent future download attempts')
 
                     # Here we're using the the global 'enclosure' value instead of the sanitized 'url'
                     # value. This is because the check for previously downloaded enclosures in the
@@ -247,7 +247,7 @@ def DownloadFile(url, downloadfolder, retrylimit, waittime):
                     WriteFile(cache, enclosure+'\n')
                     oldenclosures.append(enclosure)
                 else:
-                    print "Skipping invalid link"
+                    print("Skipping invalid link")
             #else:
                 # The problem is most likely a filename issue. Previous revs
                 # bombed out due to invalid characters.
@@ -265,30 +265,30 @@ def DownloadFile(url, downloadfolder, retrylimit, waittime):
 
         # if the file is currently being downloaded, a Ctrl-C will be caught here
         except (KeyboardInterrupt, SystemExit):
-            if debugmodeon: print "here i am after remotefile_fh.read()"
+            if debugmodeon: print("here i am after remotefile_fh.read()")
 
             # If user wishes to remove failed downloaded file, do so
             if removepartialfile:
-                if debugmodeon: print "removepartialfile setting is on"
+                if debugmodeon: print("removepartialfile setting is on")
                 RemoveFile(localfile_fh, localfilename)
             raise
 
-        except (socket.timeout, IOError, httplib.BadStatusLine), errdesc:
+        except (socket.timeout, IOError, http.client.BadStatusLine) as errdesc:
             # Presumably the server have borked the connection for an unknown reason. Let's try again.
             if not retrylimit == 0:
                 (dir, file) = os.path.split(enclosurefilename)
-                print '*' * 60
-                print "[NOTICE] Failed to download %s to %s" % (file, dir)
-                print "\t Error Description: ", errdesc, "\n"
-                print "\tRetrying ..."
-                print '*' * 60
+                print('*' * 60)
+                print("[NOTICE] Failed to download %s to %s" % (file, dir))
+                print("\t Error Description: ", errdesc, "\n")
+                print("\tRetrying ...")
+                print('*' * 60)
 
                 time.sleep(waittime)
                 retrylimit -=1
                 DownloadFile(enclosure, enclosurefilename, retrylimit, waittime)
             else:
                 # Give up on this file (for this session) and proceed to the next one
-                print "\t\tRetry limit exhausted, moving on to next file"
+                print("\t\tRetry limit exhausted, moving on to next file")
 
         else:
             # File was successfully downloaded
@@ -310,19 +310,19 @@ def SanitizeName(name, filter, type=""):
 
     if str(type).lower() == "folder":
         cleanname = re.sub(filter, "", name)
-        if debugmodeon: print "cleanname is %s" % cleanname
+        if debugmodeon: print("cleanname is %s" % cleanname)
         return cleanname
 
     elif str(type).lower() == "file":
         # Strip away question mark and all characters follow it.
         file = name.split('?').pop(0)
         cleanname = re.sub(filter, "", file)
-        if debugmodeon: print "cleanname is %s" % cleanname
+        if debugmodeon: print("cleanname is %s" % cleanname)
        return cleanname
 
     elif str(type).lower() == "url":
         cleanname = re.sub(filter, "", name)
-        if debugmodeon: print "cleanname is %s" % cleanname
+        if debugmodeon: print("cleanname is %s" % cleanname)
         return cleanname
 
     else:
@@ -339,12 +339,12 @@ def RemoveFile(localfile_fh, file):
     localfile_fh.close()
 
     # remove file (if exists)
-    if debugmodeon:print file
+    if debugmodeon:print(file)
     if os.path.isfile(file):
         #urllib.urlcleanup()
-        print "[NOTICE ]\t* Removing partial file"
+        print("[NOTICE ]\t* Removing partial file")
         os.unlink(file)
-        if debugmodeon:print "just removed file"
+        if debugmodeon:print("just removed file")
 
 
 ShowProductInfo()
@@ -421,7 +421,7 @@ def RemoveFile(localfile_fh, file):
     downloadfolder = os.getcwd()
 
 feedcount = len(feedlist)
-print "Beginnging feed processing ..."
+print("Beginnging feed processing ...")
 for feed in feedlist:
     try:
         parsed = feedparser.parse(feed, agent=__product__)
@@ -429,14 +429,14 @@ def RemoveFile(localfile_fh, file):
         # If parser did not find a title from the feed url, consider it
         # to be invalid ...
         if not 'title' in parsed['feed']:
-            print "[WARNING] Skipping invalid feed: %s \n" % feed
+            print("[WARNING] Skipping invalid feed: %s \n" % feed)
             continue
 
         # Don't echo 'parsing' for empty lines
         # FIXME: Isn't this already being handled by ParseFile?
         if len(feed) != 0:
             # Show a countdown of the remaining feeds to be parsed (after this one) using 5 digit padding
-            print '\n[%.5d left]' % (feedcount -1), 'parsing: ' + parsed['feed']['title']
+            print('\n[%.5d left]' % (feedcount -1), 'parsing: ' + parsed['feed']['title'])
             feedcount -= 1
 
     except KeyboardInterrupt:
@@ -446,7 +446,7 @@ def RemoveFile(localfile_fh, file):
         continue
 
     for entry in parsed['entries']:
-        if entry.has_key('enclosures'):
+        if 'enclosures' in entry:
             for _enclosure in entry['enclosures']:
                 enclosure = _enclosure['url']
                 if enclosure not in oldenclosures:
@@ -459,7 +459,7 @@ def RemoveFile(localfile_fh, file):
                     continue
 
                 try:
-                    print 'downloading: ' + enclosure.split("/")[-1]
+                    print('downloading: ' + enclosure.split("/")[-1])
 
                     if usesubfolders:
                         # Apply the regular expression against the title of the RSS Podcast feed and
@@ -491,7 +491,7 @@ def RemoveFile(localfile_fh, file):
            # That's all folks
            sys.exit("[quitting]\t* Aborting on user request")
 
-print "\nAll feeds parsed. Thank you for using", __product__
+print("\nAll feeds parsed. Thank you for using", __product__)
 
 if __name__ == "__main__":
     main()