#!/usr/bin/python

import os, sys, re, glob
import hashlib # for MD5
import subprocess
import shutil # for copy2

import catalogTags
import sgprops
#from multiprocessing import Pool
import argparse
import urllib2

import svn_catalog_repository
import git_catalog_repository
import git_discrete_repository

parser = argparse.ArgumentParser()
parser.add_argument("--clean", help="Regenerate every package", action="store_true")
parser.add_argument("dir", help="Catalog directory")
args = parser.parse_args()
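
# For reference, typical invocations (the script filename here is illustrative):
#   python catalog.py /path/to/catalog-root            # incremental update
#   python catalog.py --clean /path/to/catalog-root    # regenerate every package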

standardTagSet = frozenset(catalogTags.tags)

def isNonstandardTag(t):
    return t not in standardTagSet

thumbnailNames = ["thumbnail.png", "thumbnail.jpg"]
includePaths = []


class VariantData:
    def __init__(self, path, node):
        #self._primary = primary
        self._path = path
        self._name = node.getValue("sim/description")

        # ratings

        # separate thumbnails

    @property
    def catalogNode(self):
        n = sgprops.Node("variant")
        n.addChild("id").value = self._path
        n.addChild("name").value = self._name
        return n


class PackageData:
    def __init__(self, path):
        self._path = path
        self._previousSCMRevision = None
        self._previousRevision = 0
        self._thumbnails = []
        self._variants = {}
        self._revision = 0
        self._md5 = None
        self._fileSize = 0

        self._node = sgprops.Node("package")
        self._node.addChild("id").value = self.id

    def setPreviousData(self, node):
        self._previousRevision = node.getValue("revision")
        self._previousMD5 = node.getValue("md5")
        self._previousSCMRevision = node.getValue("scm-revision")
        self._fileSize = int(node.getValue("file-size-bytes"))

    @property
    def id(self):
        return os.path.basename(self._path)

    @property
    def thumbnails(self):
        return self._thumbnails

    @property
    def path(self):
        return self._path

    @property
    def variants(self):
        return self._variants

    @property
    def scmRevision(self):
        # scmRepo is the module-level SCM helper initialised from the config below
        currentRev = scmRepo.scmRevisionForPath(self._path)
        if currentRev is None:
            raise RuntimeError("Unable to query SCM revision of files")

        return currentRev

    def isSourceModified(self, scmRepo):
        if self._previousSCMRevision is None:
            return True

        if self._previousSCMRevision == self.scmRevision:
            return False

        return True

    def scanSetXmlFiles(self, includes):
        foundPrimary = False
        foundMultiple = False

        for f in os.listdir(self._path):
            if not f.endswith("-set.xml"):
                continue

            p = os.path.join(self._path, f)
            node = sgprops.readProps(p, includePaths = includes)
            if not node.hasChild("sim"):
                continue

            simNode = node.getChild("sim")
            if simNode.getValue("exclude", False):
                continue

            primary = simNode.getValue("variant-of", None)
            if primary:
                if primary not in self.variants:
                    self._variants[primary] = []
                # VariantData stores this -set.xml path as the variant id
                self._variants[primary].append(VariantData(p, node))
                continue

            if foundPrimary:
                if not foundMultiple:
                    print "Multiple primary -set.xml files at:" + self._path
                    foundMultiple = True
                continue
            else:
                foundPrimary = True

            self.parsePrimarySetNode(simNode)

        for n in thumbnailNames:
            if os.path.exists(os.path.join(self._path, n)):
                self._thumbnails.append(n)

        if not foundPrimary:
            raise RuntimeError("No primary -set.xml found at:" + self._path)

    def parsePrimarySetNode(self, sim):
        # basic / mandatory values
        self._node.addChild('name').value = sim.getValue('description')

        longDesc = sim.getValue('long-description')
        if longDesc is not None:
            self._node.addChild('description').value = longDesc

        # copy all the standard values
        for p in ['status', 'author', 'license']:
            v = sim.getValue(p)
            if v is not None:
                self._node.addChild(p).value = v

        # ratings
        if sim.hasChild('rating'):
            pkgRatings = self._node.addChild('rating')
            for r in ['FDM', 'systems', 'cockpit', 'model']:
                pkgRatings.addChild(r).value = sim.getValue('rating/' + r, 0)

        # copy tags
        if sim.hasChild('tags'):
            for c in sim.getChild('tags').getChildren('tag'):
                if isNonstandardTag(c.value):
                    print "Skipping non-standard tag:", c.value, self.path
                else:
                    self._node.addChild('tag').value = c.value

        for t in sim.getChildren("thumbnail"):
            self._thumbnails.append(t.value)

    def validate(self):
        for t in self._thumbnails:
            if not os.path.exists(os.path.join(self._path, t)):
                raise RuntimeError("missing thumbnail:" + t)

    def generateZip(self, outDir):
        self._revision = self._previousRevision + 1

        zipName = self.id + ".zip"
        zipFilePath = os.path.join(outDir, zipName)

        os.chdir(os.path.dirname(self.path))

        print "Creating zip", zipFilePath
        # TODO: exclude certain files
        # anything we can do to make this faster?
        subprocess.call(['zip', '--quiet', '-r', zipFilePath, self.id])
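
        # A possible approach to the exclusion TODO above (a sketch only, left
        # disabled): zip accepts -x glob patterns after the input path, so SCM
        # metadata could be skipped. The patterns below are illustrative.
        #subprocess.call(['zip', '--quiet', '-r', zipFilePath, self.id,
        #                 '-x', '*/.svn/*', '*/.git*'])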

        with open(zipFilePath, 'rb') as zipFile:
            self._md5 = hashlib.md5(zipFile.read()).hexdigest()
        self._fileSize = os.path.getsize(zipFilePath)

    def useExistingCatalogData(self):
        self._md5 = self._previousMD5
        # keep the previously published revision for unmodified packages
        self._revision = self._previousRevision

    def packageNode(self, mirrorUrls, thumbnailUrl):
        self._node.getChild("md5", create = True).value = self._md5
        self._node.getChild("file-size-bytes", create = True).value = self._fileSize
        self._node.getChild("revision", create = True).value = int(self._revision)
        self._node.getChild("scm-revision", create = True).value = self.scmRevision

        for m in mirrorUrls:
            self._node.addChild("url").value = m + "/" + self.id + ".zip"

        for t in self._thumbnails:
            self._node.addChild("thumbnail").value = thumbnailUrl + "/" + self.id + "_" + t

        for pr in self._variants:
            for vr in self._variants[pr]:
                self._node.addChild(vr.catalogNode)

        return self._node

    def extractThumbnails(self, thumbnailDir):
        for t in self._thumbnails:
            fullName = self.id + "_" + t
            shutil.copy2(os.path.join(self._path, t),
                         os.path.join(thumbnailDir, fullName))
            # TODO : verify image format, size and so on
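
            # One way that verification could be handled (sketch only, left
            # disabled): Pillow would be an extra dependency, and the size
            # limit below is purely illustrative.
            #from PIL import Image
            #img = Image.open(os.path.join(thumbnailDir, fullName))
            #if img.format not in ("PNG", "JPEG") or img.size[0] > 1024:
            #    raise RuntimeError("bad thumbnail image:" + fullName)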


def scanPackages(globPath):
    result = []
    print "Scanning", globPath
    print os.getcwd()
    for d in glob.glob(globPath):
        # check dir contains at least one -set.xml file
        if len(glob.glob(os.path.join(d, "*-set.xml"))) == 0:
            print "no -set.xml in", d
            continue

        result.append(PackageData(d))

    return result


def initScmRepository(node):
    scmType = node.getValue("type")
    if scmType == "svn":
        svnPath = node.getValue("path")
        return svn_catalog_repository.SVNCatalogRepository(svnPath)
    elif scmType == "git":
        gitPath = node.getValue("path")
        usesSubmodules = node.getValue("uses-submodules", False)
        return git_catalog_repository.GitCatalogRepository(gitPath, usesSubmodules)
    elif scmType == "git-discrete":
        return git_discrete_repository.GitDiscreteSCM(node)
    elif scmType is None:
        raise RuntimeError("No scm/type defined in catalog configuration")
    else:
        raise RuntimeError("Unsupported SCM type:" + scmType)


def processUpload(node, outputPath):
    if not node.getValue("enabled", True):
        print "Upload disabled"
        return

    uploadType = node.getValue("type")
    if uploadType == "rsync":
        subprocess.call(["rsync", node.getValue("args", "-az"), ".",
                         node.getValue("remote")],
                        cwd = outputPath)
    elif uploadType == "rsync-ssh":
        subprocess.call(["rsync", node.getValue("args", "-azve"),
                         "ssh", ".",
                         node.getValue("remote")],
                        cwd = outputPath)
    elif uploadType == "scp":
        subprocess.call(["scp", node.getValue("args", "-r"), ".",
                         node.getValue("remote")],
                        cwd = outputPath)
    else:
        raise RuntimeError("Unsupported upload type:" + uploadType)


# dictionary of package id -> PackageData
packages = {}

rootDir = args.dir
if not os.path.isabs(rootDir):
    rootDir = os.path.abspath(rootDir)
os.chdir(rootDir)

configPath = 'catalog.config.xml'
if not os.path.exists(configPath):
    raise RuntimeError("no config file found at:" + configPath)

config = sgprops.readProps(configPath)
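
# For orientation, a sketch of catalog.config.xml using only the keys this
# script reads; the exact structure and all values are illustrative, not a
# verified template:
#
# <PropertyList>
#   <template>
#     <url>http://example.com/catalog/catalog.xml</url>
#     <!-- copied verbatim into the generated catalog -->
#   </template>
#   <output-dir>output</output-dir>
#   <thumbnail-dir>thumbnails</thumbnail-dir>
#   <thumbnail-url>http://example.com/catalog/thumbnails</thumbnail-url>
#   <mirror>http://example.com/catalog</mirror>
#   <include-dir>/path/to/shared-data</include-dir>
#   <aircraft-dir>/path/to/aircraft/*</aircraft-dir>
#   <scm>
#     <type>git</type>
#     <path>/path/to/aircraft</path>
#   </scm>
#   <upload>
#     <type>rsync</type>
#     <remote>user@host:/srv/catalog</remote>
#   </upload>
# </PropertyList>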

# out path
outPath = config.getValue('output-dir')
if outPath is None:
    # default out path
    outPath = os.path.join(rootDir, "output")
elif not os.path.isabs(outPath):
    # resolve a relative output-dir against the catalog root
    outPath = os.path.join(rootDir, outPath)

if args.clean and os.path.exists(outPath):
    print "Cleaning output"
    shutil.rmtree(outPath)

if not os.path.exists(outPath):
    os.mkdir(outPath)

thumbnailPath = os.path.join(outPath, config.getValue('thumbnail-dir', "thumbnails"))
if not os.path.exists(thumbnailPath):
    os.mkdir(thumbnailPath)

thumbnailUrl = config.getValue('thumbnail-url')

for i in config.getChildren("include-dir"):
    if not os.path.exists(i.value):
        print "Skipping missing include path:", i.value
        continue
    includePaths.append(i.value)

mirrorUrls = []

# contains existing catalog
existingCatalogPath = os.path.join(outPath, 'catalog.xml')

scmRepo = initScmRepository(config.getChild('scm'))

# scan the directories in the aircraft paths
for g in config.getChildren("aircraft-dir"):
    for p in scanPackages(g.value):
        packages[p.id] = p

if not os.path.exists(existingCatalogPath):
    try:
        # can happen on new or from clean, try to pull current
        # catalog from the upload location
        response = urllib2.urlopen(config.getValue("template/url"), timeout = 5)
        content = response.read()
        f = open(existingCatalogPath, 'w')
        f.write(content)
        f.close()
    except urllib2.URLError as e:
        print "Downloading current catalog failed", e

if os.path.exists(existingCatalogPath):
    try:
        previousCatalog = sgprops.readProps(existingCatalogPath)
    except:
        print "Previous catalog is malformed"
        previousCatalog = sgprops.Node()

    for p in previousCatalog.getChildren("package"):
        pkgId = p.getValue("id")
        if pkgId not in packages:
            print "Orphaned old package:", pkgId
            continue

        packages[pkgId].setPreviousData(p)
else:
    print "No previous catalog"

catalogNode = sgprops.Node("catalog")
sgprops.copy(config.getChild("template"), catalogNode)

# use a list, not a generator: mirrorUrls is iterated once per package below
mirrorUrls = [m.value for m in config.getChildren("mirror")]

packagesToGenerate = []
for p in packages.values():
    p.scanSetXmlFiles(includePaths)

    if p.isSourceModified(scmRepo):
        packagesToGenerate.append(p)
    else:
        p.useExistingCatalogData()

# def f(x):
#     x.generateZip(outPath)
#     x.extractThumbnails(thumbnailPath)
#     return True
#
# p = Pool(8)
# print(p.map(f,packagesToGenerate))

for p in packagesToGenerate:
    p.generateZip(outPath)
    p.extractThumbnails(thumbnailPath)

print "Creating catalog"
for p in packages.values():
    catalogNode.addChild(p.packageNode(mirrorUrls, thumbnailUrl))

catalogNode.write(os.path.join(outPath, "catalog.xml"))

print "Uploading"
if config.hasChild("upload"):
    processUpload(config.getChild("upload"), outPath)