Mirror of https://github.com/SynologyOpenSource/pkgscripts-ng.git
Synced 2025-08-01 16:48:10 +00:00

Synology DSM 6.0.2 toolkit framework
1. Pythonize EnvDeploy and PkgCreate.py
2. EnvDeploy: change the SourceForge download directory
include/python/BuildEnv.py (new file, 149 lines)
@@ -0,0 +1,149 @@
#!/usr/bin/python3

import os
import subprocess

ScriptDir = os.path.realpath(os.path.dirname(__file__) + '/../../')
SynoBase = os.path.dirname(ScriptDir)
Prefix = os.path.dirname(SynoBase)
SourceDir = SynoBase + "/source"

if 'lnxscripts' in os.path.basename(ScriptDir):
    __IsPkgEnv = False
else:
    __IsPkgEnv = True

VIRTUAL_PROJECT_SEPARATOR = "-virtual-"
ConfDir = 'SynoBuildConf'
ProjectDependsName = "ProjectDepends.py"
__PkgEnvVersion = None


def setEnvironmentVersion(version):
    global __PkgEnvVersion
    __PkgEnvVersion = version


def inChroot():
    return os.path.isfile('/root/.chroot')


class Project:
    def __init__(self, proj, allow_missing=False):
        self.proj = proj
        self.allow_missing = allow_missing

        if not inChroot():
            project_src = deVirtual(self.proj)

        self.__project_dir = os.path.join(SourceDir, project_src)

    @property
    def build_script(self):
        return self.__find_script('build')

    @property
    def install_script(self):
        return self.__find_script('install')

    @property
    def installdev_script(self):
        return self.__find_script('install-dev')

    @property
    def error_script(self):
        return self.__find_script('error')

    @property
    def depends_script(self):
        return self.__find_script('depends')

    def settings(self, chroot=None):
        return self.__find_script('settings', chroot)

    def collect(self, chroot=None):
        return self.__find_script('collect', chroot)

    def selfcheck(self, chroot=None):
        return self.__find_script('selfcheck', chroot)

    def info(self, chroot=None):
        return os.path.join(self.project_dir(chroot), 'INFO')

    def project_dir(self, chroot=None):
        project_dir = self.__project_dir
        if chroot:
            project_dir = os.path.join(chroot, os.path.basename(SourceDir), self.proj)
        return project_dir

    def __find_script(self, script_type, chroot=None):
        for proj_name in [self.proj, self.proj.split("-32")[0], self.proj.split("-virtual-32")[0]]:
            virtual_name = getVirtualProjectExtension(proj_name)

            script = os.path.join(self.project_dir(chroot), ConfDir, script_type + virtual_name)
            if os.path.isfile(script) or os.path.islink(script):
                return script

        if self.allow_missing:
            return os.path.join(self.project_dir(chroot), ConfDir, script_type)
        else:
            return ""


class DpkgNotFoundError(RuntimeError):
    def __init__(self, deb_name):
        print("Deb %s not found" % deb_name)


def getIncludeVariable(include_file, variable):
    return subprocess.check_output('source %s/include/%s; echo $%s' % (ScriptDir, include_file, variable),
                                   shell=True, executable='/bin/bash').decode().strip()


def getChrootSynoBase(platform, version=None, suffix=None):
    if __IsPkgEnv:
        env = 'build_env'
        if suffix:
            env += "-" + suffix
        if version is None:
            version = __PkgEnvVersion
        return os.path.join(SynoBase, env, 'ds.' + platform + '-' + version)
    return Prefix + '/ds.' + platform


def getList(listName):
    ret = []
    for config in ["projects", "config"]:
        if os.path.exists(os.path.join(ScriptDir, 'include', config)):
            ret = getIncludeVariable(config, listName).split()

        if ret:
            return ret

    return None


def isVirtualProject(proj):
    return VIRTUAL_PROJECT_SEPARATOR in proj


def deVirtual(proj):
    return proj.split(VIRTUAL_PROJECT_SEPARATOR)[0]


def getVirtualName(proj):
    if isVirtualProject(proj):
        return proj.split(VIRTUAL_PROJECT_SEPARATOR)[1]
    else:
        return ""


def getVirtualProjectExtension(proj):
    if isVirtualProject(proj):
        return VIRTUAL_PROJECT_SEPARATOR + getVirtualName(proj)

    return ""


def IsPackageEnvironment():
    return __IsPkgEnv
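A quick usage sketch of the virtual-project helpers above; it is not part of the commit, and the project name is purely illustrative:

import BuildEnv

proj = 'php-virtual-apache22'                  # hypothetical virtual project
BuildEnv.isVirtualProject(proj)                # True
BuildEnv.deVirtual(proj)                       # 'php' -> checkout directory under source/
BuildEnv.getVirtualName(proj)                  # 'apache22'
BuildEnv.getVirtualProjectExtension(proj)      # '-virtual-apache22'

# Project() uses that extension when it searches SynoBuildConf, e.g. it prefers
# source/php/SynoBuildConf/build-virtual-apache22 over plain .../build when present.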
include/python/cache.py (new file, 11 lines)
@@ -0,0 +1,11 @@

class cache(dict):
    def __init__(self, func):
        self.func = func

    def __call__(self, *args):
        return self[args]

    def __missing__(self, key):
        result = self[key] = self.func(*key)
        return result
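A minimal sketch of the memoizing decorator above (the function is illustrative; arguments must be hashable because they become dictionary keys):

from cache import cache

@cache
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

print(fib(30))   # each distinct n is computed once, then served from the dict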
include/python/chroot.py (new file, 42 lines)
@@ -0,0 +1,42 @@
import os
import subprocess


class Chroot:
    def umount(self):
        try:
            subprocess.check_call(['umount', os.path.join(self.chroot, 'proc')])
        except subprocess.CalledProcessError:
            pass

    def mount(self):
        try:
            mount_point = os.path.join(self.chroot, 'proc')
            if not os.path.ismount(mount_point):
                subprocess.check_call(['mount', '-t', 'proc', 'none', mount_point])
        except subprocess.CalledProcessError:
            pass

    def __init__(self, path):
        self.chroot = path
        self.orig_fd = os.open("/", os.O_RDONLY)
        self.chroot_fd = os.open(self.chroot, os.O_RDONLY)

    def __enter__(self):
        self.mount()
        os.chroot(self.chroot)
        os.fchdir(self.chroot_fd)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        os.fchdir(self.orig_fd)
        os.chroot(".")
        os.close(self.orig_fd)
        os.close(self.chroot_fd)
        self.umount()

    def get_outside_path(self, path):
        return self.chroot + "/" + path

    def get_inside_path(self, path):
        return path.replace(self.chroot, "")
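A hedged usage sketch of the context manager (it needs root privileges and an already-deployed chroot tree; the path below is hypothetical):

from chroot import Chroot

with Chroot('/build_env/ds.avoton-6.0') as env:
    # inside the with-block the process root is the chroot, so ordinary file
    # operations and subprocesses resolve against it; /proc is mounted for us
    print(env.get_outside_path('etc/VERSION'))   # same file as seen from the host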
include/python/config_parser.py (new file, 164 lines)
@@ -0,0 +1,164 @@
import os
import configparser
from collections import defaultdict


class ConfigNotFoundError(RuntimeError):
    pass


def remove_quote(string):
    if "#" in string:
        string = string.split("#")[0].strip()

    return string.strip('"').strip("'")


class ConfigParser():
    def __init__(self, config):
        if not os.path.isfile(config):
            raise ConfigNotFoundError(config)

        self.config = configparser.ConfigParser(allow_no_value=True)
        self.config.optionxform = str
        self.config.read(config)

    def _get_section_keys(self, section):
        if self.config.has_section(section):
            return list(set(map(remove_quote, self.config[section].keys())))
        else:
            return []

    def _get_section_values(self, section):
        if self.config.has_section(section):
            return list(set(map(remove_quote, self.config[section].values())))
        else:
            return []

    def get_section_dict(self, section):
        ret = defaultdict(list)
        if self.config.has_section(section):
            ret = defaultdict(list, self.config[section])
            for key, value in ret.items():
                ret[key] = remove_quote(value).split()
        return ret


class DependsParser(ConfigParser):
    sec_build_dep = 'BuildDependent'
    sec_build_tag = 'BuildDependent-Tag'
    sec_ref = 'ReferenceOnly'
    sec_ref_tag = 'ReferenceOnly-Tag'
    sec_pack = 'PackagePacking'
    sec_pack_tag = 'PackagePacking-Tag'
    sec_unittest = 'Unittest'
    sec_default = 'default'

    def convert_value_str(self, d):
        ret = {}
        for key, value in d.items():
            ret[key] = value[0]

        return ret

    @property
    def build_dep(self):
        return self._get_section_keys(self.sec_build_dep)

    @property
    def build_tag(self):
        return self._get_section_keys(self.sec_build_tag)

    @property
    def ref_only(self):
        return self._get_section_keys(self.sec_ref)

    @property
    def ref_only_tag(self):
        return self._get_section_keys(self.sec_ref_tag)

    @property
    def pack(self):
        return self._get_section_keys(self.sec_pack)

    @property
    def pack_tag(self):
        return self._get_section_keys(self.sec_pack_tag)

    @property
    def unittest(self):
        return self._get_section_keys(self.sec_unittest)

    def get_env_section(self, section):
        return self.convert_value_str(self.get_section_dict(section))

    def get_all_dependent(self):
        return self.build_dep, self.build_tag, self.ref_only, self.ref_only_tag


class ProjectDependsParser(ConfigParser):
    sec_project_dep = 'project dependency'
    sec_64_project_dep = '64bit project dependency'
    sec_kernel = '${Kernel}'
    sec_variables = 'variables'
    sec_dynamic_vars = 'dynamic variable list'

    @property
    def project_depends(self):
        return self.get_section_dict(self.sec_project_dep)

    @property
    def all_kernels(self):
        return self._get_section_values(self.sec_kernel)

    @property
    def variables(self):
        return self.get_section_dict(self.sec_variables)

    @property
    def dynamic_variables(self):
        return self.get_section_dict(self.sec_dynamic_vars)['list']

    def get_platform_kernel(self, platform):
        return self.get_section_dict(self.sec_kernel)[platform][0]

    def get_platform_kernels(self, platforms):
        return [self.get_platform_kernel(_) for _ in platforms]

    def get_project_dep(self, project):
        return self.project_depends[project]

    def get_dyn_sec_value(self, dyn_var, platform):
        if dyn_var not in self.config:
            raise RuntimeError("[%s] not in project.depends." % dyn_var)

        if platform in self.get_section_dict(dyn_var):
            return self.get_section_dict(dyn_var)[platform][0]
        elif 'default' in self.get_section_dict(dyn_var):
            return self.get_section_dict(dyn_var)['default'][0]

    def get_dyn_sec_values(self, dyn_var, platforms):
        return [self.get_dyn_sec_value(dyn_var, _) for _ in platforms]


class PackageSettingParser(ConfigParser):
    def get_section(self, package):
        return self.get_section_dict(package)


class KeyValueParser():
    def __init__(self, f):
        if not os.path.isfile(f):
            raise ConfigNotFoundError(f)

        config = configparser.ConfigParser()
        config.optionxform = str
        with open(f, 'r', encoding='utf-8') as fd:
            config.read_string('[top]\n' + fd.read())
        self.config = dict(config['top'])

    def __getitem__(self, key):
        return remove_quote(self.config[key])

    def keys(self):
        return self.config.keys()
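A small, self-contained sketch of the depends-file parser; the file path and its contents are fabricated for the demo:

from config_parser import DependsParser

with open('/tmp/depends.demo', 'w') as f:
    f.write('[BuildDependent]\nlibsynocore\n\n[ReferenceOnly]\nsynobios\n')

dep = DependsParser('/tmp/depends.demo')
print(dep.build_dep)   # ['libsynocore']  -- section keys, quotes/comments stripped
print(dep.ref_only)    # ['synobios']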
include/python/link_project.py (new file, 52 lines)
@@ -0,0 +1,52 @@
import os
import subprocess
import sys
import shutil
sys.path.append(os.path.dirname(__file__))
import BuildEnv


class LinkProjectError(RuntimeError):
    pass


def get_project_source(project):
    if BuildEnv.VIRTUAL_PROJECT_SEPARATOR in project:
        project = project.split(BuildEnv.VIRTUAL_PROJECT_SEPARATOR)[0]
    return os.path.join(BuildEnv.SourceDir, project)


def link(source, dest, verbase=False):
    if not os.path.exists(source):
        raise LinkProjectError("%s not exist." % source)

    print("Link %s -> %s" % (source, dest))
    subprocess.check_call(['cp', '-al', source, dest])


def link_scripts(chroot):
    dest_path = os.path.join(chroot, os.path.basename(BuildEnv.ScriptDir))
    if os.path.isdir(dest_path):
        shutil.rmtree(dest_path)
    link(BuildEnv.ScriptDir, dest_path)


def link_projects(projects, dest):
    for proj in projects:
        dest_path = os.path.join(dest, 'source', proj)
        if os.path.isdir(dest_path):
            shutil.rmtree(dest_path)
        link(get_project_source(proj), os.path.join(dest, 'source', proj))


def link_platform(project, platform, version=None):
    source = get_project_source(project)
    chroot = BuildEnv.getChrootSynoBase(platform, version)
    dest = os.path.join(chroot, "source", project)

    if os.path.isdir(dest):
        shutil.rmtree(dest)

    link(source, dest)
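A hedged sketch of linking one project into a platform chroot (project, platform, and version are illustrative; the source checkout and the deployed chroot must already exist):

import link_project

link_project.link_platform('libsynocore', 'avoton', version='6.0')

# Because link() uses `cp -al`, the chroot copy shares inodes with the original
# tree, so a whole project is "copied" without duplicating any file data.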
include/python/parallel.py (new file, 88 lines)
@@ -0,0 +1,88 @@
import multiprocessing
import traceback


class LogExceptions(object):
    def __init__(self, callable):
        self.__callable = callable
        return

    def __call__(self, *args, **kwargs):
        try:
            result = self.__callable(*args, **kwargs)

        except Exception:
            print(traceback.format_exc())
            raise

        return result


def doParallel(func, items, *args, **kwargs):
    pool = multiprocessing.Pool(processes=None)
    results = []

    try:
        for item in items:
            if isinstance(item, str):
                argument = [item] + list(args)
            else:
                argument = list(item) + list(args)
            results.append(pool.apply_async(LogExceptions(func), argument, kwargs))
        pool.close()
        pool.join()

        for result in results:
            result.get()

    except (KeyboardInterrupt, Exception):
        pool.terminate()
        pool.join()
        raise


def doPlatformParallel(func, platforms, *args, **kwargs):
    pool = multiprocessing.Pool(processes=None)
    results = dict()
    output = dict()

    try:
        for platform in platforms:
            argument = [platform] + list(args)
            results[platform] = pool.apply_async(LogExceptions(func), argument, kwargs)
        pool.close()
        pool.join()

        for item in results:
            output[item] = results[item].get()

    except (KeyboardInterrupt, Exception):
        pool.terminate()
        pool.join()
        raise

    return output


def parallelDict(dict):
    pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
    results = []
    output = []

    try:
        for func in dict:
            for item in dict[func]:
                results.append(pool.apply_async(LogExceptions(func), list(item)))
        pool.close()
        pool.join()

        for result in results:
            ret = result.get()
            if ret:
                output.append(ret)
    except (KeyboardInterrupt, Exception):
        pool.terminate()
        pool.join()
        raise

    return output
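A minimal, runnable sketch of the per-platform helper; the worker function and platform names are made up for the demo:

from parallel import doPlatformParallel

def kernel_name(platform, prefix):
    # stand-in for a real per-platform build step
    return "%s-%s" % (prefix, platform)

if __name__ == '__main__':
    out = doPlatformParallel(kernel_name, ['avoton', 'braswell'], 'linux-3.10.x')
    print(out)   # {'avoton': 'linux-3.10.x-avoton', 'braswell': 'linux-3.10.x-braswell'}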
include/python/project_visitor.py (new file, 145 lines)
@@ -0,0 +1,145 @@
import os
from collections import defaultdict

from config_parser import ProjectDependsParser, DependsParser
import BuildEnv


class UpdateFailedError(RuntimeError):
    pass


class ConflictError(RuntimeError):
    pass


class UpdateHook:
    def __init__(self, *args, **kwargs):
        pass

    def update_tag(self, projects):
        pass

    def update_branch(self, projects):
        pass


class ProjectVisitor:
    def __init__(self, update_hook, dep_level, platforms, depends_cache=None, check_conflict=False):
        self.dict_projects = None
        self.update_hook = update_hook
        self.dep_level = dep_level
        self.proj_depends = ProjectDependsParser(os.path.join(BuildEnv.ScriptDir, 'include', 'project.depends'))
        self.platforms = platforms
        self.depends_cache = depends_cache
        self.check_conflict = check_conflict

    def devirtual_all(self, projs):
        return set(map(BuildEnv.deVirtual, projs))

    def traverse(self, root_proj):
        if not isinstance(root_proj, list):
            root_proj = [root_proj]

        self.dict_projects = defaultdict(set)
        self._traverse_projects(root_proj, 1)
        return self.dict_projects

    def checkout_git_refs(self):
        if self.update_hook:
            self.update_hook.update_tag(self.dict_projects['refTags'])
            self.update_hook.update_branch(self.dict_projects['refs'])
            intersect_projs = self.devirtual_all(self.dict_projects['tags']) & self.devirtual_all(self.dict_projects['branches'])
            if intersect_projs:
                self.update_hook.update_branch(intersect_projs)

    def show_proj_info(self):
        print("[INFO] Branch projects: " + " ".join(self.dict_projects['branches']))
        print("[INFO] Tag projects: " + " ".join(self.dict_projects['tags']))
        print("[INFO] Reference projects: " + " ".join(self.dict_projects['refs']))
        print("[INFO] Reference tag projects: " + " ".join(self.dict_projects['refTags']))

    def _traverse_projects(self, projects, level):
        if not projects:
            return

        if self.update_hook:
            self.update_hook.update_branch(projects)
        self.dict_projects['branches'].update(projects)

        self._check_confict()

        if level == self.dep_level:
            return

        new_masters, new_tags, new_refs, new_ref_tags = self._resolve_project_catagory(projects)
        self.dict_projects['refs'].update(new_refs)
        self.dict_projects['refTags'].update(new_ref_tags)

        new_masters -= self.dict_projects['branches']
        new_tags -= self.dict_projects['tags']

        self._traverse_projects(new_masters, level+1)
        self._traverse_tag_projects(new_tags, level+1)

    def _traverse_tag_projects(self, projects, level):
        if not projects:
            return

        if self.update_hook and not self.depends_cache:
            self.update_hook.update_tag(projects)
        self.dict_projects['tags'].update(projects)

        self._check_confict()

        if level == self.dep_level:
            return

        if self.depends_cache:
            new_masters, new_tags, new_refs, new_ref_tags = self.depends_cache.get(projects)
        else:
            new_masters, new_tags, new_refs, new_ref_tags = self._resolve_project_catagory(projects)

        self.dict_projects['refs'].update(new_refs)
        self.dict_projects['refTags'].update(new_ref_tags)
        dep_projs = new_masters | new_tags

        tag_projs = dep_projs - self.dict_projects['tags']

        self._traverse_tag_projects(tag_projs, level+1)

    def _resolve_project_catagory(self, projects):
        branches = set()
        tags = set()
        refs = set()
        refTags = set()

        for proj in projects:
            depends_file = BuildEnv.Project(proj).depends_script
            if os.path.isfile(depends_file):
                depends = DependsParser(depends_file)
                branches.update(depends.build_dep)
                tags.update(depends.build_tag)
                refs.update(depends.ref_only)
                refTags.update(depends.ref_only_tag)
            else:
                if proj in self.proj_depends.project_depends:
                    tags.update(self.proj_depends.get_project_dep(proj))

        for catagory in branches, tags, refs, refTags:
            # dynamic variable
            for var in self.proj_depends.dynamic_variables:
                if var in catagory:
                    catagory.remove(var)
                    catagory.update(self.proj_depends.get_dyn_sec_values(var, self.platforms))

            # synobios
            for k, v in self.proj_depends.variables.items():
                if k in catagory:
                    catagory.remove(k)
                    catagory.update(v + self.proj_depends.get_platform_kernels(self.platforms))

        return branches, tags, refs, refTags

    def _check_confict(self):
        pass
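A hedged sketch of how the visitor is typically driven; the hook subclass, project name, and platform are illustrative, and a real run needs include/project.depends plus project sources in place:

import project_visitor

class PrintHook(project_visitor.UpdateHook):
    # illustrative hook: a real hook would check out git branches/tags here
    def update_branch(self, projects):
        print("update branch:", sorted(projects))

    def update_tag(self, projects):
        print("update tag:", sorted(projects))

visitor = project_visitor.ProjectVisitor(PrintHook(), dep_level=3, platforms=['avoton'])
deps = visitor.traverse('libsynocore')   # returns dict of 'branches'/'tags'/'refs'/'refTags' sets
visitor.show_proj_info()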
include/python/tee.py (new file, 26 lines)
@@ -0,0 +1,26 @@
import os

class Tee:
    def __init__(self, stream, log_file, buffer=1, move=True):
        if move:
            self.move_log_old(log_file)
        self.stream = stream
        self.log = open(log_file, 'a', buffer)

    def write(self, msg):
        self.stream.write(msg)
        self.log.write(msg)

    def flush(self):
        self.stream.flush()
        self.log.flush()

    def __del__(self):
        self.log.close()

    def move_log_old(self, log):
        if os.path.isfile(log):
            old = log + ".old"
            if os.path.isfile(old):
                os.remove(old)
            os.rename(log, old)
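A minimal sketch of mirroring console output into a log (the log file name is illustrative):

import sys
from tee import Tee

# everything written to stdout now also lands in build.log; an existing
# build.log is first rotated to build.log.old
sys.stdout = Tee(sys.stdout, 'build.log')
print("this line goes to the console and to build.log")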
include/python/toolkit.py (new file, 27 lines)
@@ -0,0 +1,27 @@
import os


class TarballManager:
    def __init__(self, version, root):
        self.version = version
        self.root = root

    @property
    def base_tarball_name(self):
        return 'base_env-%s.txz' % self.version

    @property
    def base_tarball_path(self):
        return os.path.join(self.root, self.base_tarball_name)

    def get_env_tarball_name(self, platform):
        return 'ds.%s-%s.env.txz' % (platform, self.version)

    def get_env_tarball_path(self, platform):
        return os.path.join(self.root, self.get_env_tarball_name(platform))

    def get_dev_tarball_name(self, platform):
        return 'ds.%s-%s.dev.txz' % (platform, self.version)

    def get_dev_tarball_path(self, platform):
        return os.path.join(self.root, self.get_dev_tarball_name(platform))
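A short sketch of the tarball naming scheme (version and directory are illustrative):

from toolkit import TarballManager

tm = TarballManager('6.0', '/toolkit_tarballs')
print(tm.base_tarball_path)               # /toolkit_tarballs/base_env-6.0.txz
print(tm.get_env_tarball_path('avoton'))  # /toolkit_tarballs/ds.avoton-6.0.env.txz
print(tm.get_dev_tarball_path('avoton'))  # /toolkit_tarballs/ds.avoton-6.0.dev.txz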
include/python/version_file.py (new file, 16 lines)
@@ -0,0 +1,16 @@
import os
import sys

sys.path.append(os.path.dirname(__file__))

import config_parser


class VersionFile(config_parser.KeyValueParser):
    @property
    def dsm_version(self):
        return self['majorversion'] + "." + self['minorversion']

    @property
    def buildnumber(self):
        return self['buildnumber']
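A small sketch of parsing a DSM-style VERSION file; the demo writes its own illustrative file rather than reading a real /etc.defaults/VERSION:

from version_file import VersionFile

with open('/tmp/VERSION.demo', 'w') as f:
    f.write('majorversion="6"\nminorversion="0"\nbuildnumber="8451"\n')

v = VersionFile('/tmp/VERSION.demo')
print(v.dsm_version)    # 6.0
print(v.buildnumber)    # 8451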