#! /usr/bin/env python3
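"""Generate a FlightGear aircraft catalog.

Reads catalog.config.xml and template.xml from the catalog directory
given on the command line, optionally refreshes the configured SCM
checkouts, zips each aircraft directory, copies thumbnails and previews,
and writes catalog.xml plus an md5sum.xml cache to the output directory.
"""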

import argparse
import datetime
import hashlib  # md5
import lxml.etree as ET
import os
import re
import shutil
import subprocess
import sys
import time

import sgprops
import catalogTags
import catalog
from catalog import make_aircraft_node, make_aircraft_zip, parse_config_file, parse_template_file

CATALOG_VERSION = 4

# Major Python version (2 or 3); used below for bytes/str compatibility.
PY_VERSION = sys.version_info[0]

parser = argparse.ArgumentParser()
parser.add_argument("--update", help="Update/pull SCM source",
                    action="store_true")
parser.add_argument("--no-update",
                    help="Disable updating from SCM source",
                    action="store_true")
parser.add_argument("--clean", help="Force regeneration of all zip files",
                    action="store_true")
parser.add_argument("--quiet", help="Only print warnings and errors",
                    action="store_true")
parser.add_argument("dir", help="Catalog directory")
args = parser.parse_args()
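
# Illustrative invocation (script and directory names are examples):
#   python3 update-catalog.py --clean --quiet /var/catalogs/default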

includes = []
mirrors = []  # mirror base URLs

# Robust XML text helper: return a node's text, or '' when the node or
# its text is missing.
def get_xml_text(e):
    if e is not None and e.text is not None:
        return e.text
    else:
        return ''
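
# e.g. get_xml_text(config_node.find('local-output')) yields '' when the
# element is missing, so callers below can use plain string comparisons.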

# Use svn to report the last change date within dir, as an integer of the
# form YYYYMMDD; returns None if 'svn info' output has no recognisable
# 'Last Changed Date' line.
def last_change_date_svn(dir):
    command = ['svn', 'info', dir]
    result = subprocess.check_output(command)

    # Python 3 compatibility: check_output() returns bytes.
    if PY_VERSION == 3:
        result = result.decode('utf8')

    match = re.search(r'Last Changed Date: (\d+)-(\d+)-(\d+)', result)
    if match:
        rev_str = match.group(1) + match.group(2) + match.group(3)
        return int(rev_str)
    return None
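
# Illustrative 'svn info' line the regex above matches:
#   Last Changed Date: 2016-08-18 02:33:10 +0800 (Thu, 18 Aug 2016)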

# Find the most recent file mtime within a directory subtree.
def scan_dir_for_change_date_mtime(path):
    maxsec = 0
    for name in os.listdir(path):
        fullname = os.path.join(path, name)
        if name == '.' or name == '..':
            pass
        elif os.path.isdir(fullname):
            mtime = scan_dir_for_change_date_mtime(fullname)
            if mtime > maxsec:
                maxsec = mtime
        else:
            mtime = os.path.getmtime(fullname)
            if mtime > maxsec:
                maxsec = mtime
    return maxsec
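
# A compact alternative using os.walk (a sketch; default=0 reproduces the
# empty-tree result above):
#   max((os.path.getmtime(os.path.join(root, f))
#        for root, dirs, files in os.walk(path) for f in files), default=0)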

# MD5 checksum of a file's contents; the whole file is read into memory.
def get_md5sum(file):
    with open(file, 'rb') as f:
        md5sum = hashlib.md5(f.read()).hexdigest()
    return md5sum
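
# For very large files a chunked read would bound memory use (sketch):
#   md5 = hashlib.md5()
#   with open(file, 'rb') as f:
#       for chunk in iter(lambda: f.read(65536), b''):
#           md5.update(chunk)
#   return md5.hexdigest()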

# Copy a variant's preview images into the previews output directory,
# prefixed with the package name.
def copy_previews_for_variant(variant, package_name, package_dir, previews_dir):
    if 'previews' not in variant:
        return

    for preview in variant['previews']:
        preview_src = os.path.join(package_dir, preview['path'])
        preview_dst = os.path.join(previews_dir, package_name + '_' + preview['path'])
        #print(preview_src, preview_dst, preview['path'])
        dir = os.path.dirname(preview_dst)
        if not os.path.isdir(dir):
            os.makedirs(dir)
        if os.path.exists(preview_src):
            shutil.copy2(preview_src, preview_dst)

# Copy previews for the base package, then for each variant.
def copy_previews_for_package(package, variants, package_name, package_dir, previews_dir):
    copy_previews_for_variant(package, package_name, package_dir, previews_dir)
    for v in variants:
        copy_previews_for_variant(v, package_name, package_dir, previews_dir)

# Copy a variant's thumbnail into the thumbnails output directory,
# prefixed with the package name.
def copy_thumbnail_for_variant(variant, package_name, package_dir, thumbnails_dir):
    if 'thumbnail' not in variant:
        return

    thumb_src = os.path.join(package_dir, variant['thumbnail'])
    thumb_dst = os.path.join(thumbnails_dir, package_name + '_' + variant['thumbnail'])

    dir = os.path.dirname(thumb_dst)
    if not os.path.isdir(dir):
        os.makedirs(dir)
    if os.path.exists(thumb_src):
        shutil.copy2(thumb_src, thumb_dst)

def copy_thumbnails_for_package(package, variants, package_name, package_dir, thumbnails_dir):
    copy_thumbnail_for_variant(package, package_name, package_dir, thumbnails_dir)

    # and now each variant in turn
    for v in variants:
        copy_thumbnail_for_variant(v, package_name, package_dir, thumbnails_dir)

# Process a single aircraft directory: build its catalog entry, (re)build
# its zip as needed, and collect checksums, thumbnails and previews.
def process_aircraft_dir(name, repo_path):
    global includes
    global download_base
    global output_dir
    global valid_zips
    global previews_dir
    global mirrors

    aircraft_dir = os.path.join(repo_path, name)
    if not os.path.isdir(aircraft_dir):
        return

    (package, variants) = catalog.scan_aircraft_dir(aircraft_dir, includes)
    if package is None:
        if not args.quiet:
            print("skipping: %s (no -set.xml files)" % name)
        return

    if not args.quiet:
        print("%s:" % name)

    package_node = make_aircraft_node(name, package, variants, download_base, mirrors)

    download_url = download_base + name + '.zip'
    if 'thumbnail' in package:
        # thumbnail_url is never actually used; the guard is still needed
        # because computing the URL unconditionally assumed every package
        # defines a thumbnail, which broke for packages without one.
        thumbnail_url = download_base + 'thumbnails/' + name + '_' + package['thumbnail']

    # get cached md5sum if it exists
    md5sum = get_xml_text(md5sum_root.find(str('aircraft_' + name)))

    # now do the packaging and revision-number work
    dir_mtime = scan_dir_for_change_date_mtime(aircraft_dir)
    if repo_type == 'svn':
        rev = last_change_date_svn(aircraft_dir)
    else:
        # for git / unmanaged trees, derive a YYYYMMDD revision from the
        # newest mtime in the aircraft directory
        d = datetime.datetime.utcfromtimestamp(dir_mtime)
        rev = d.strftime("%Y%m%d")
    package_node.append(catalog.make_xml_leaf('revision', rev))
    #print("rev: %s" % rev)
    #print("dir mtime: %s" % dir_mtime)
    zipfile = os.path.join(output_dir, name + '.zip')
    valid_zips.append(name + '.zip')
    if not os.path.exists(zipfile) \
            or dir_mtime > os.path.getmtime(zipfile) \
            or args.clean:
        # rebuild the zip file
        if not args.quiet:
            print("updating: %s" % zipfile)
        make_aircraft_zip(repo_path, name, zipfile, zip_excludes, verbose=not args.quiet)
        md5sum = get_md5sum(zipfile)
    else:
        if not args.quiet:
            print("(no change)")
        if md5sum == "":
            md5sum = get_md5sum(zipfile)

    filesize = os.path.getsize(zipfile)
    package_node.append(catalog.make_xml_leaf('md5', md5sum))
    package_node.append(catalog.make_xml_leaf('file-size-bytes', filesize))

    # handle md5sum cache
    node = md5sum_root.find('aircraft_' + name)
    if node is not None:
        node.text = md5sum
    else:
        md5sum_root.append(catalog.make_xml_leaf('aircraft_' + name, md5sum))

    # handle sharing: when another catalog's output contains an identical
    # zip, replace ours with a symlink into that shared output directory
    if share_md5sum_root is not None:
        sharedNode = share_md5sum_root.find(str('aircraft_' + name))
        if sharedNode is not None:
            shared_md5 = get_xml_text(sharedNode)
            if shared_md5 == md5sum:
                if not args.quiet:
                    print("Sharing zip with share catalog for: %s" % name)
                os.remove(zipfile)
                os.symlink(os.path.join(share_output_dir, name + '.zip'), zipfile)

    # handle thumbnails
    copy_thumbnails_for_package(package, variants, name, aircraft_dir, thumbnail_dir)

    catalog_node.append(package_node)

    # copy previews for the package and variants into the output directory
    copy_previews_for_package(package, variants, name, aircraft_dir, previews_dir)
2017-02-27 08:32:01 +08:00
|
|
|
|
2016-08-18 02:33:10 +08:00
|
|
|
#def get_file_stats(file):
|
|
|
|
# f = open(file, 'r')
|
|
|
|
# md5 = hashlib.md5(f.read()).hexdigest()
|
|
|
|
# file_size = os.path.getsize(file)
|
|
|
|
# return (md5, file_size)
|
|
|
|
|

if not os.path.isdir(args.dir):
    print("A valid catalog directory must be provided")
    sys.exit(1)

parser = ET.XMLParser(remove_blank_text=True)
config_node = parse_config_file(parser=parser, file_name=os.path.join(args.dir, 'catalog.config.xml'))
template_node = parse_template_file(parser=parser, file_name=os.path.join(args.dir, 'template.xml'))
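
# The lookups below imply a catalog.config.xml shaped roughly like this
# (a sketch inferred from the find() calls; the root element name and all
# values are illustrative):
#
#   <PropertyList>
#     <download-url>https://example.com/catalog/</download-url>
#     <download-url>https://mirror.example.com/catalog/</download-url>
#     <local-output>/var/catalogs/output</local-output>
#     <share-output>/var/catalogs/shared/output</share-output>
#     <share-md5-sums>/var/catalogs/shared/md5sum.xml</share-md5-sums>
#     <include-dir>/var/aircraft/common</include-dir>
#     <scm>
#       <type>git</type>
#       <path>/var/aircraft/repo</path>
#       <update>false</update>
#       <skip>SomeAircraft</skip>
#       <include>OtherAircraft</include>
#     </scm>
#   </PropertyList>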

md5sum_file = os.path.join(args.dir, 'md5sum.xml')
if os.path.exists(md5sum_file):
    md5sum_tree = ET.parse(md5sum_file, parser)
    md5sum_root = md5sum_tree.getroot()
else:
    md5sum_root = ET.Element('PropertyList')
    md5sum_tree = ET.ElementTree(md5sum_root)
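
# The cache written back at the end of the run looks roughly like this
# (element names come from the code; the name and checksum are illustrative):
#   <PropertyList>
#     <aircraft_c172p>d41d8cd98f00b204e9800998ecf8427e</aircraft_c172p>
#   </PropertyList>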

# share .zip files with other output dirs
share_output_dir = get_xml_text(config_node.find('share-output'))
share_md5_file = get_xml_text(config_node.find('share-md5-sums'))
if share_output_dir != '' and share_md5_file != '':
    print("Output shared with: %s" % share_output_dir)
    share_md5sum_tree = ET.parse(share_md5_file, parser)
    share_md5sum_root = share_md5sum_tree.getroot()
else:
    share_md5sum_root = None

# SCM providers
scm_list = config_node.findall('scm')
upload_node = config_node.find('upload')

# the first download-url entry is the primary base URL; any further
# entries are treated as mirrors
download_base = None
for i in config_node.findall("download-url"):
    url = get_xml_text(i)
    if not url.endswith('/'):
        url += '/'

    if download_base is None:
        download_base = url
    else:
        mirrors.append(url)
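
# e.g. with <download-url>https://example.com/catalog/</download-url>, an
# aircraft directory "c172p" will be published as
# https://example.com/catalog/c172p.zip (names illustrative).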

output_dir = get_xml_text(config_node.find('local-output'))
if output_dir == '':
    output_dir = os.path.join(args.dir, 'output')
if not os.path.isdir(output_dir):
    os.mkdir(output_dir)

thumbnail_dir = os.path.join(output_dir, 'thumbnails')
if not os.path.isdir(thumbnail_dir):
    os.mkdir(thumbnail_dir)

previews_dir = os.path.join(output_dir, 'previews')
if not os.path.isdir(previews_dir):
    os.mkdir(previews_dir)

tmp = os.path.join(args.dir, 'zip-excludes.lst')
zip_excludes = os.path.realpath(tmp)

for i in config_node.findall("include-dir"):
    path = get_xml_text(i)
    if not os.path.exists(path):
        print("Skipping missing include path: %s" % path)
        continue
    includes.append(path)

# freshen repositories
if args.no_update:
    print('Skipping repository updates.')
else:
    cwd = os.getcwd()
    for scm in scm_list:
        # XML-mandated skip (<update>false</update>), unless overridden
        # by --update on the command line.
        if not args.update:
            skip = get_xml_text(scm.find('update'))
            if skip == 'false':
                continue

        repo_type = get_xml_text(scm.find('type'))
        repo_path = get_xml_text(scm.find('path'))
        includes.append(repo_path)

        if repo_type == 'svn':
            print("SVN update: %s" % repo_path)
            subprocess.call(['svn', 'update', repo_path])
        elif repo_type == 'git':
            print("GIT pull: %s" % repo_path)
            os.chdir(repo_path)
            subprocess.call(['git', 'pull'])
        elif repo_type == 'no-scm':
            print("No update of unmanaged files: %s" % repo_path)
        else:
            print("Unknown scm type: %s %s" % (scm, repo_path))
    os.chdir(cwd)

# names of zip files we want (so we can identify/remove orphans)
valid_zips = []

# create the catalog tree
catalog_node = ET.Element('PropertyList')
catalog_root = ET.ElementTree(catalog_node)

# include the template configuration
for child in template_node:
    catalog_node.append(child)

# scan repositories for catalog information
for scm in scm_list:
    repo_type = get_xml_text(scm.find('type'))
    repo_path = get_xml_text(scm.find('path'))
    skip_nodes = scm.findall('skip')
    skip_list = []
    for s in skip_nodes:
        skip_list.append(get_xml_text(s))

    # Selective list of craft to include; a non-empty include list
    # overrides the skip list entirely.
    include_nodes = scm.findall('include')
    include_list = []
    for node in include_nodes:
        include_list.append(get_xml_text(node))
    if len(include_list):
        skip_list = []

    print("Skip list: %s" % skip_list)
    print("Include list: %s" % include_list)

    names = os.listdir(repo_path)
    for name in sorted(names, key=lambda s: s.lower()):
        if name in skip_list or (len(include_list) and name not in include_list):
            if not args.quiet:
                print("Skipping: %s" % name)
            continue

        # process each aircraft in turn
        # print("%s %s" % (name, repo_path))
        process_aircraft_dir(name, repo_path)

# write out the master catalog file
cat_file = os.path.join(output_dir, 'catalog.xml')
catalog_root.write(cat_file, encoding='utf-8', xml_declaration=True, pretty_print=True)

# write out the md5sum cache file
print("Writing md5sum cache: %s" % md5sum_file)
md5sum_tree.write(md5sum_file, encoding='utf-8', xml_declaration=True, pretty_print=True)

# look for orphaned zip files: zips in the output directory that no
# scanned aircraft produced on this run
files = os.listdir(output_dir)
for file in files:
    if file.endswith('.zip') and file not in valid_zips:
        print("orphaned zip: %s" % file)
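
# Note that orphaned zips are only reported; nothing is deleted automatically.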