#!/usr/bin/python

import os, sys, re, glob, shutil
import subprocess
import argparse
import urllib2
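
# Helper modules shipped with the catalog tooling: sgprops for property-list
# XML parsing, package for per-aircraft package data, and the *_repository
# modules for the SVN/Git back-ends used below.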
import sgprops
import package as pkg

import svn_catalog_repository
import git_catalog_repository
import git_discrete_repository

parser = argparse.ArgumentParser()
parser.add_argument("--clean", help="Regenerate every package",
                    action="store_true")
parser.add_argument("--update", help="Update/pull SCM source",
                    action="store_true")
parser.add_argument("--force-dirty", dest="forcedirty",
                    help="Mark every package as dirty", action="store_true")
parser.add_argument("--no-update", dest="noupdate",
                    help="Disable updating from SCM source",
                    action="store_true")
parser.add_argument("dir", help="Catalog directory")

args = parser.parse_args()
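
# Shared state: extra property include paths gathered from the config and the
# SCM checkouts, and the id -> PackageData map built while scanning.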
includePaths = []
packages = {}
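
# Collect PackageData entries for every aircraft directory in the repository's
# aircraft path that contains at least one *-set.xml file.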
def scanPackages(scmRepo):
    result = []
    globPath = scmRepo.aircraftPath
    if globPath is None:
        return result

    print "Scanning", globPath
    print os.getcwd()
    for d in glob.glob(globPath):
        # check dir contains at least one -set.xml file
        if len(glob.glob(os.path.join(d, "*-set.xml"))) == 0:
            print "no -set.xml in", d
            continue

        result.append(pkg.PackageData(d, scmRepo))

    return result
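
# Map an <scm> node from the catalog configuration to the matching repository
# implementation.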
def initScmRepository(node):
    scmType = node.getValue("type")
    if scmType == "svn":
        return svn_catalog_repository.SVNCatalogRepository(node)
    elif scmType == "git":
        return git_catalog_repository.GITCatalogRepository(node)
    elif scmType == "git-discrete":
        return git_discrete_repository.GitDiscreteSCM(node)
    elif scmType is None:
        raise RuntimeError("No scm/type defined in catalog configuration")
    else:
        raise RuntimeError("Unsupported SCM type: " + scmType)
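
# Instantiate every configured SCM repository, optionally updating it first,
# and register its checkout path as a property include path.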
def initRepositories():
    repositories = []

    for scm in config.getChildren("scm"):
        scmRepo = initScmRepository(scm)
        if args.update or (not args.noupdate and scm.getValue("update")):
            scmRepo.update()
        # presumably include repos in parse path
        # TODO: make this configurable
        includePaths.append(scmRepo.path)
        repositories.append(scmRepo)

    return repositories
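
# Push the generated output directory to the remote location configured for
# an <upload> node.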
def processUpload(node, outputPath):
    if not node.getValue("enabled", True):
        print "Upload disabled"
        return

    uploadType = node.getValue("type")
    if uploadType == "rsync":
        subprocess.call(["rsync", node.getValue("args", "-az"), ".",
                         node.getValue("remote")],
                        cwd = outputPath)
    elif uploadType == "rsync-ssh":
        print "Doing rsync upload to:", node.getValue("remote")
        subprocess.call(["rsync", node.getValue("args", "-azve"),
                         "ssh", ".",
                         node.getValue("remote")],
                        cwd = outputPath)
    elif uploadType == "scp":
        subprocess.call(["scp", node.getValue("args", "-r"), ".",
                         node.getValue("remote")],
                        cwd = outputPath)
    else:
        raise RuntimeError("Unsupported upload type:" + uploadType)
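
# Locate the previously published catalog.xml, downloading it from the
# template URL when it is not present locally (e.g. after --clean or on a
# fresh checkout), so unchanged packages can reuse their existing entries.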
def parseExistingCatalog():
    global existingCatalogPath
    global previousCatalog

    # contains existing catalog
    existingCatalogPath = os.path.join(outPath, 'catalog.xml')

    if not os.path.exists(existingCatalogPath):
        url = config.getValue("template/url")
        print "Attempting downloading from", url
        try:
            # can happen on new or from clean, try to pull current
            # catalog from the upload location
            response = urllib2.urlopen(url, timeout = 5)
            content = response.read()
            f = open(existingCatalogPath, 'w')
            f.write(content)
            f.close()
            print "...worked"
        except urllib2.URLError as e:
            print "Downloading current catalog failed", e, "from", url
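
# Main script: everything below runs against the catalog directory given on
# the command line.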
rootDir = args.dir
if not os.path.isabs(rootDir):
    rootDir = os.path.abspath(rootDir)
os.chdir(rootDir)

configPath = 'catalog.config.xml'
if not os.path.exists(configPath):
    raise RuntimeError("no config file found at:" + configPath)

config = sgprops.readProps(configPath)

# out path
outPath = config.getValue('output-dir')
if outPath is None:
    # default out path
    outPath = os.path.join(rootDir, "output")
elif not os.path.isabs(outPath):
    # resolve a relative output-dir against the catalog root
    outPath = os.path.join(rootDir, outPath)

if args.clean:
    print "Cleaning output"
    shutil.rmtree(outPath)

if not os.path.exists(outPath):
    os.mkdir(outPath)

thumbnailPath = os.path.join(outPath, config.getValue('thumbnail-dir', "thumbnails"))
if not os.path.exists(thumbnailPath):
    os.mkdir(thumbnailPath)

thumbnailUrls = list(t.value for t in config.getChildren("thumbnail-url"))

for i in config.getChildren("include-dir"):
    if not os.path.exists(i.value):
        print "Skipping missing include path:", i.value
        continue
    includePaths.append(i.value)

parseExistingCatalog()
repositories = initRepositories()
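
# Scan every repository for aircraft packages and parse their *-set.xml files.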
for scm in repositories:
    for p in scanPackages(scm):
        p.scanSetXmlFiles(includePaths)
        packages[p.id] = p
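
# Feed data from the previously published catalog back into the matching
# packages, so entries whose source is unmodified can be reused.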
if os.path.exists(existingCatalogPath):
    try:
        previousCatalog = sgprops.readProps(existingCatalogPath)
    except:
        print "Previous catalog is malformed"
        previousCatalog = sgprops.Node()

    for p in previousCatalog.getChildren("package"):
        pkgId = p.getValue("id")
        if pkgId not in packages:
            print "Orphaned old package:", pkgId
            continue

        packages[pkgId].setPreviousData(p)
else:
    print "No previous catalog"
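
# Build the output catalog from the configured template.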
catalogNode = sgprops.Node("catalog")
sgprops.copy(config.getChild("template"), catalogNode)

# version 3 catalog
catalogNode.getChild("catalog-version", create = True).value = 3
mirrorUrls = list(m.value for m in config.getChildren("mirror"))
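
# Only regenerate packages whose source has changed (or all of them when
# --force-dirty is given); everything else reuses its existing catalog data.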
packagesToGenerate = []
for p in packages.values():
    if p.isSourceModified or args.forcedirty:
        packagesToGenerate.append(p)
    else:
        p.useExistingCatalogData()

excludeFilePath = os.path.join(rootDir, "zip-excludes.lst")

# def f(x):
#     x.generateZip(outPath)
#     x.extractThumbnails(thumbnailPath)
#     return True
#
# p = Pool(8)
# print(p.map(f, packagesToGenerate))
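
# Generate the zip archive and extract thumbnails for each dirty package.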
for p in packagesToGenerate:
    p.generateZip(outPath, excludeFilePath)
    p.extractThumbnails(thumbnailPath)

print "Creating catalog"
for p in packages.values():
    catalogNode.addChild(p.packageNode(mirrorUrls, thumbnailUrls[0]))

catalogNode.write(os.path.join(outPath, "catalog.xml"))
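
# Finally, upload the output directory to each configured destination.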
for up in config.getChildren("upload"):
    processUpload(up, outPath)