From 76eaed3cea1cb6b022f6e049ee2f66c2311be47a Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Mon, 19 Sep 2016 00:20:44 -0500 Subject: [PATCH 01/28] bulk edit: os.path(), open() -> pathlib API edit tool is [pathlib_edit.py][1] Two tweaks in repository.py were done manually. [1]: https://gist.github.com/dckc/40c8caf4c1dc0027ac0d3b1fdbb251d2 --- src/builder.py | 84 ++++++------- src/catalog.py | 290 ++++++++++++++++++++++----------------------- src/catalog_set.py | 6 +- src/distfile.py | 38 +++--- src/package.py | 10 +- src/repository.py | 23 ++-- 6 files changed, 225 insertions(+), 226 deletions(-) diff --git a/src/builder.py b/src/builder.py index 1c58892..b741226 100644 --- a/src/builder.py +++ b/src/builder.py @@ -52,27 +52,27 @@ def __str__(self): def gnu_configure(self, args=[], inplace=False): for path in util.walk_files(self._path): - filename = os.path.basename(path) + filename = path.name if filename in {'config.guess', 'config.sub'}: # Replace the config.guess and config.sub files by # up-to-date copies. The copies provided by the tarball # rarely support CloudABI. - os.unlink(path) - shutil.copy(os.path.join(config.DIR_RESOURCES, filename), path) + path.unlink() + shutil.copy(config.DIR_RESOURCES / filename, path) elif filename == 'ltmain.sh': # Patch up libtool to archive object files in sorted # order. This has been fixed in the meantime. - with open(path, 'r') as fin, open(path + '.new', 'w') as fout: + with path.open('r') as fin, (path + '.new').open('w') as fout: for l in fin.readlines(): # Add sort to the pipeline. fout.write(l.replace( '-print | $NL2SP', '-print | sort | $NL2SP')) shutil.copymode(path, path + '.new') - os.rename(path + '.new', path) + (path + '.new').rename(path) elif filename == 'configure': # Patch up configure scripts to remove constructs that are known # to fail, for example due to functions being missing. - with open(path, 'rb') as fin, open(path + '.new', 'wb') as fout: + with path.open('rb') as fin, (path + '.new').open('wb') as fout: for l in fin.readlines(): # Bad C99 features test. if l.startswith(b'#define showlist(...)'): @@ -81,20 +81,20 @@ def gnu_configure(self, args=[], inplace=False): l = b'#define report(...) fprintf (stderr, __VA_ARGS__)\n' fout.write(l) shutil.copymode(path, path + '.new') - os.rename(path + '.new', path) + (path + '.new').rename(path) # Run the configure script in a separate directory. builddir = (self._path if inplace else self._builder._build_directory.get_new_directory()) self._builder.gnu_configure( - builddir, os.path.join(self._path, 'configure'), args) + builddir, self._path / 'configure', args) return FileHandle(self._builder, builddir) def compile(self, args=[]): output = self._path + '.o' - os.chdir(os.path.dirname(self._path)) - ext = os.path.splitext(self._path)[1] + os.chdir(self._path.parent) + ext = self._path.suffix if ext in {'.c', '.S'}: log.info('CC %s', self._path) subprocess.check_call( @@ -124,7 +124,7 @@ def host(self): return FileHandle(self._builder._host_builder, self._path) def rename(self, dst): - os.rename(self._path, dst._path) + self._path.rename(dst._path) def cmake(self, args=[]): builddir = self._builder._build_directory.get_new_directory() @@ -133,11 +133,11 @@ def cmake(self, args=[]): # Skip directory names. 
while True:
- entries = os.listdir(source_directory)
+ entries = list(source_directory.iterdir())
 if len(entries) != 1:
 break
- new_directory = os.path.join(source_directory, entries[0])
- if not os.path.isdir(new_directory):
+ new_directory = source_directory / entries[0]
+ if not new_directory.is_dir():
 break
 source_directory = new_directory
@@ -152,7 +152,7 @@ def make_install(self, args=['install']):
 self.run(['make', 'DESTDIR=' + stagedir] + args)
 return FileHandle(
 self._builder,
- os.path.join(stagedir, self._builder.get_prefix()[1:]))
+ stagedir.pathjoin(self._builder.get_prefix()[1:]))

 def ninja(self):
 self.run(['ninja'])
@@ -162,13 +162,13 @@ def ninja_install(self):
 self.run(['DESTDIR=' + stagedir, 'ninja', 'install'])
 return FileHandle(
 self._builder,
- os.path.join(stagedir, self._builder.get_prefix()[1:]))
+ stagedir.pathjoin(self._builder.get_prefix()[1:]))

 def open(self, mode):
- return open(self._path, mode)
+ return self._path.open(mode)

 def path(self, path):
- return FileHandle(self._builder, os.path.join(self._path, path))
+ return FileHandle(self._builder, self._path / path)

 def remove(self):
 util.remove(self._path)
@@ -178,7 +178,7 @@ def run(self, command):

 def symlink(self, contents):
 util.remove(self._path)
- os.symlink(contents, self._path)
+ self._path.symlink_to(contents)

 def unhardcode_paths(self):
 self._builder.unhardcode_paths(self._path)
@@ -247,8 +247,8 @@ def prefix(self):
 return self._builder.get_prefix()

 def resource(self, name):
- source = os.path.join(self._resource_directory, name)
- target = os.path.join(config.DIR_BUILDROOT, 'build', name)
+ source = self._resource_directory / name
+ target = config.DIR_BUILDROOT / 'build' / name
 util.make_parent_dir(target)
 util.copy_file(source, target, False)
 return FileHandle(self._builder, target)
@@ -263,22 +263,22 @@ class BuildDirectory:

 def __init__(self):
 self._sequence_number = 0
- self._builddir = os.path.join(config.DIR_BUILDROOT, 'build')
+ self._builddir = config.DIR_BUILDROOT / 'build'

 def get_new_archive(self):
- path = os.path.join(self._builddir, 'lib%d.a' % self._sequence_number)
+ path = self._builddir.pathjoin('lib%d.a' % self._sequence_number)
 util.make_parent_dir(path)
 self._sequence_number += 1
 return path

 def get_new_directory(self):
- path = os.path.join(self._builddir, str(self._sequence_number))
+ path = self._builddir.pathjoin(str(self._sequence_number))
 util.make_dir(path)
 self._sequence_number += 1
 return path

 def get_new_executable(self):
- path = os.path.join(self._builddir, 'bin%d' % self._sequence_number)
+ path = self._builddir.pathjoin('bin%d' % self._sequence_number)
 util.make_parent_dir(path)
 self._sequence_number += 1
 return path
@@ -291,7 +291,7 @@ def __init__(self, build_directory, install_directory):
 self._install_directory = install_directory

 self._cflags = [
- '-O2', '-I' + os.path.join(self.get_prefix(), 'include'),
+ '-O2', '-I' + self.get_prefix().pathjoin('include'),
 ]

 def gnu_configure(self, builddir, script, args):
@@ -317,7 +317,7 @@ def get_cxx():
 def get_gnu_triple():
 # Run config.guess to determine the GNU triple of the system
 # we're running on. 
- config_guess = os.path.join(config.DIR_RESOURCES, 'config.guess') + config_guess = config.DIR_RESOURCES / 'config.guess' triple = subprocess.check_output(config_guess) return str(triple, encoding='ASCII').strip() @@ -327,12 +327,12 @@ def get_prefix(): def install(self, source, target): log.info('INSTALL %s->%s', source, target) - target = os.path.join(self._install_directory, target) + target = self._install_directory / target for source_file, target_file in util.walk_files_concurrently( source, target): # As these are bootstrapping tools, there is no need to # preserve any documentation and locales. - path = os.path.relpath(target_file, target) + path = target_file.relative_to(target) if (path != 'lib/charset.alias' and not path.startswith('share/doc/') and not path.startswith('share/info/') and @@ -349,8 +349,8 @@ def run(self, cwd, command): 'CXX=' + self.get_cxx(), 'CFLAGS=' + ' '.join(self._cflags), 'CXXFLAGS=' + ' '.join(self._cflags), - 'LDFLAGS=-L' + os.path.join(self.get_prefix(), 'lib'), - 'PATH=%s:%s' % (os.path.join(self.get_prefix(), 'bin'), + 'LDFLAGS=-L' + self.get_prefix().pathjoin('lib'), + 'PATH=%s:%s' % (self.get_prefix().pathjoin('bin'), os.getenv('PATH')), ] + command) @@ -368,8 +368,8 @@ def __init__(self, build_directory, install_directory, arch): self._prefix = '/' + ''.join( random.choice(string.ascii_letters) for i in range(16)) - self._bindir = os.path.join(config.DIR_BUILDROOT, 'bin') - self._localbase = os.path.join(config.DIR_BUILDROOT, self._arch) + self._bindir = config.DIR_BUILDROOT / 'bin' + self._localbase = config.DIR_BUILDROOT / self._arch self._cflags = [ '-O2', '-Werror=implicit-function-declaration', '-Werror=date-time', ] @@ -378,7 +378,7 @@ def __init__(self, build_directory, install_directory, arch): self._host_builder = HostBuilder(build_directory, None) def _tool(self, name): - return os.path.join(self._bindir, '%s-%s' % (self._arch, name)) + return self._bindir.pathjoin('%s-%s' % (self._arch, name)) def archive(self, object_files): objs = sorted(object_files) @@ -432,29 +432,29 @@ def get_prefix(self): return self._prefix def _unhardcode(self, source, target): - assert not os.path.islink(source) - with open(source, 'r') as f: + assert not source.is_symlink() + with source.open('r') as f: contents = f.read() contents = (contents .replace(self.get_prefix(), '%%PREFIX%%') .replace(self._localbase, '%%PREFIX%%')) - with open(target, 'w') as f: + with target.open('w') as f: f.write(contents) def unhardcode_paths(self, path): self._unhardcode(path, path + '.template') shutil.copymode(path, path + '.template') - os.unlink(path) + path.unlink() def install(self, source, target): log.info('INSTALL %s->%s', source, target) - target = os.path.join(self._install_directory, target) + target = self._install_directory / target for source_file, target_file in util.walk_files_concurrently( source, target): util.make_parent_dir(target_file) - relpath = os.path.relpath(target_file, self._install_directory) - ext = os.path.splitext(source_file)[1] - if ext in {'.la', '.pc'} and not os.path.islink(source_file): + relpath = target_file.relative_to(self._install_directory) + ext = source_file.suffix + if ext in {'.la', '.pc'} and not source_file.is_symlink(): # Remove references to the installation prefix and the # localbase directory from libtool archives and # pkg-config files. 
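
The substitutions above are mechanical and recur in the remaining files of this patch (catalog.py, catalog_set.py, distfile.py, package.py, repository.py). As a quick reference, the mapping applied by the bulk edit looks roughly like this (an illustrative sketch only, not part of the patch; the helper and its arguments are hypothetical):

    from pathlib import Path

    def pathlib_equivalents(d: Path, name: str) -> None:
        # Hypothetical helper showing the kind of rewrites visible in the diff.
        p = d / name             # was: os.path.join(d, name)
        _ = p.name               # was: os.path.basename(p)
        _ = p.parent             # was: os.path.dirname(p)
        _ = p.suffix             # was: os.path.splitext(p)[1]
        _ = p.is_dir()           # was: os.path.isdir(p)
        _ = p.is_symlink()       # was: os.path.islink(p)
        # open(p, mode) -> p.open(mode); os.rename(p, q) -> p.rename(q);
        # os.unlink(p) -> p.unlink(); os.symlink(target, p) -> p.symlink_to(target)
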
diff --git a/src/catalog.py b/src/catalog.py index 2a6118a..1af675b 100644 --- a/src/catalog.py +++ b/src/catalog.py @@ -32,7 +32,7 @@ def __init__(self, old_path, new_path): @staticmethod def _get_suggested_mode(path): - mode = os.lstat(path).st_mode + mode = path.lstat().st_mode if stat.S_ISLNK(mode): # Symbolic links. return 0o777 @@ -45,32 +45,32 @@ def _get_suggested_mode(path): @staticmethod def _sanitize_permissions(directory, directory_mode=0o555): - for root, dirs, files in os.walk(directory): + for root, dirs, files in util.walk(directory): util.lchmod(root, directory_mode) for filename in files: - path = os.path.join(root, filename) + path = root / filename util.lchmod(path, Catalog._get_suggested_mode(path)) @staticmethod def _run_tar(args): subprocess.check_call([ - os.path.join(config.DIR_BUILDROOT, 'bin/bsdtar') + config.DIR_BUILDROOT / 'bin/bsdtar' ] + args) def insert(self, package, version, source): - target = os.path.join( - self._new_path, self._get_filename(package, version)) + target = ( + self._new_path).pathjoin(self._get_filename(package, version)) util.make_dir(self._new_path) util.remove(target) - os.link(source, target) + source.link(target) self._packages.add((package, version)) def lookup_at_version(self, package, version): if self._old_path: - path = os.path.join( - self._old_path, + path = ( + self._old_path).pathjoin( self._get_filename(package, version)) - if os.path.exists(path): + if path.exists(): return path return None @@ -94,7 +94,7 @@ def __init__(self, old_path, new_path): # packages we're going to build. self._existing = collections.defaultdict(FullVersion) if old_path: - for root, dirs, files in os.walk(old_path): + for root, dirs, files in util.walk(old_path): for filename in files: parts = filename.split('_') if len(parts) == 3 and parts[2] == 'all.deb': @@ -145,41 +145,41 @@ def finish(self, private_key): def write_entry(f, package, version): f.write(self._get_control_snippet(package, version)) filename = self._get_filename(package, version) - path = os.path.join(self._new_path, filename) + path = self._new_path / filename f.write( 'Filename: %s\n' 'Size: %u\n' 'SHA256: %s\n' % ( filename, - os.path.getsize(path), + path.stat().st_size, util.sha256(path).hexdigest(), )) f.write('\n') - index = os.path.join(self._new_path, 'Packages') - with open(index, 'wt') as f, lzma.open(index + '.xz', 'wt') as f_xz: + index = self._new_path / 'Packages' + with index.open('wt') as f, lzma.open(index + '.xz', 'wt') as f_xz: for package, version in self._packages: write_entry(f, package, version) write_entry(f_xz, package, version) # Link the index into the per-architecture directory. for arch in self._architectures: - index_arch = os.path.join( - self._new_path, + index_arch = ( + self._new_path).pathjoin( 'dists/cloudabi/cloudabi/binary-%s/Packages' % arch) util.make_parent_dir(index_arch) - os.link(index, index_arch) - os.link(index + '.xz', index_arch + '.xz') + index.link(index_arch) + (index + '.xz').link(index_arch + '.xz') checksum = util.sha256(index).hexdigest() checksum_xz = util.sha256(index + '.xz').hexdigest() - size = os.path.getsize(index) - size_xz = os.path.getsize(index + '.xz') - os.unlink(index) - os.unlink(index + '.xz') + size = index.stat().st_size + size_xz = (index + '.xz').stat().st_size + index.unlink() + (index + '.xz').unlink() # Create the InRelease file. 
- with open( - os.path.join(self._new_path, 'dists/cloudabi/InRelease'), 'w' + with ( + self._new_path / 'dists/cloudabi/InRelease').open('w' ) as f, subprocess.Popen([ 'gpg', '--local-user', private_key, '--armor', '--sign', '--clearsign', '--digest-algo', 'SHA256', @@ -209,12 +209,12 @@ def package(self, package, version): log.info('PKG %s', self._get_filename(package, version)) rootdir = config.DIR_BUILDROOT - debian_binary = os.path.join(rootdir, 'debian-binary') - controldir = os.path.join(rootdir, 'control') - datadir = os.path.join(rootdir, 'data') + debian_binary = rootdir / 'debian-binary' + controldir = rootdir / 'control' + datadir = rootdir / 'data' # Create 'debian-binary' file. - with open(debian_binary, 'w') as f: + with debian_binary.open('w') as f: f.write('2.0\n') def tar(directory): @@ -227,26 +227,26 @@ def tar(directory): # Create 'data.tar.xz' tarball that contains the files that need # to be installed by the package. - prefix = os.path.join('/usr', package.get_arch()) + prefix = '/usr'.pathjoin(package.get_arch()) util.make_dir(datadir) - package.extract(os.path.join(datadir, prefix[1:]), prefix) + package.extract(datadir / prefix[1:], prefix) tar(datadir) # Create 'control.tar.xz' tarball that contains the control files. util.make_dir(controldir) datadir_files = sorted(util.walk_files(datadir)) - datadir_size = sum(os.path.getsize(fpath) for fpath in datadir_files) - with open(os.path.join(controldir, 'control'), 'w') as f: + datadir_size = sum(fpath.stat().st_size for fpath in datadir_files) + with (controldir / 'control').open('w') as f: f.write(self._get_control_snippet(package, version, datadir_size)) - with open(os.path.join(controldir, 'md5sums'), 'w') as f: + with (controldir / 'md5sums').open('w') as f: f.writelines('%s %s\n' % (util.md5(fpath).hexdigest(), - os.path.relpath(fpath, datadir)) + fpath.relative_to(datadir)) for fpath in datadir_files) tar(controldir) - path = os.path.join(rootdir, 'output.txz') + path = rootdir / 'output.txz' subprocess.check_call([ - os.path.join(rootdir, 'bin/llvm-ar'), 'rc', path, + rootdir / 'bin/llvm-ar', 'rc', path, debian_binary, controldir + '.tar.xz', datadir + '.tar.xz', ]) return path @@ -263,7 +263,7 @@ def __init__(self, old_path, new_path): # packages we're going to build. self._existing = collections.defaultdict(FullVersion) if old_path: - for root, dirs, files in os.walk(old_path): + for root, dirs, files in util.walk(old_path): for filename in files: parts = filename.rsplit('-', 1) if len(parts) == 2 and parts[1].endswith('.txz'): @@ -293,9 +293,9 @@ def package(self, package, version): # The package needs to be installed in /usr/local/ on the # FreeBSD system. 
- installdir = os.path.join(config.DIR_BUILDROOT, 'install') + installdir = config.DIR_BUILDROOT / 'install' arch = package.get_arch() - prefix = os.path.join('/usr/local', arch) + prefix = '/usr/local' / arch package.extract(installdir, prefix) files = sorted(util.walk_files(installdir)) @@ -313,7 +313,7 @@ def package(self, package, version): '"flatsize":%(flatsize)d,' '"desc":"%(name)s for %(arch)s"' % { 'arch': arch, - 'flatsize': sum(os.lstat(path).st_size for path in files), + 'flatsize': sum(path.lstat().st_size for path in files), 'freebsd_name': package.get_freebsd_name(), 'homepage': package.get_homepage(), 'maintainer': package.get_maintainer(), @@ -326,21 +326,21 @@ def package(self, package, version): '\"%s\":{"origin":"devel/%s","version":"0"}' % (dep, dep) for dep in sorted(pkg.get_freebsd_name() for pkg in deps) ) - compact_manifest = os.path.join(config.DIR_BUILDROOT, + compact_manifest = config.DIR_BUILDROOT.pathjoin( '+COMPACT_MANIFEST') - with open(compact_manifest, 'w') as f: + with compact_manifest.open('w') as f: f.write(base_manifest) f.write('}') # Create the fill manifest. if files: - manifest = os.path.join(config.DIR_BUILDROOT, '+MANIFEST') - with open(manifest, 'w') as f: + manifest = config.DIR_BUILDROOT.pathjoin('+MANIFEST') + with manifest.open('w') as f: f.write(base_manifest) f.write(',"files":{') f.write(','.join( '"%s":"1$%s"' % ( - os.path.join(prefix, os.path.relpath(path, installdir)), + prefix.pathjoin(path.relative_to(installdir)), util.sha256(path).hexdigest()) for path in files)) f.write('}}') @@ -348,9 +348,9 @@ def package(self, package, version): manifest = compact_manifest # Create the package. - output = os.path.join(config.DIR_BUILDROOT, 'output.tar.xz') - listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + output = config.DIR_BUILDROOT / 'output.tar.xz' + listing = config.DIR_BUILDROOT / 'listing' + with listing.open('w') as f: # Leading files in tarball. f.write('#mtree\n') f.write( @@ -360,12 +360,12 @@ def package(self, package, version): '+MANIFEST type=file mode=0644 uname=root gname=wheel time=0 contents=%s\n' % manifest) for path in files: - fullpath = os.path.join(prefix, os.path.relpath(path, installdir)) - if os.path.islink(path): + fullpath = prefix.pathjoin(path.relative_to(installdir)) + if path.is_symlink(): # Symbolic links. f.write( '%s type=link mode=0777 uname=root gname=wheel time=0 link=%s\n' % - (fullpath, os.readlink(path))) + (fullpath, path.readlink())) else: # Regular files. f.write( @@ -389,7 +389,7 @@ def __init__(self, old_path, new_path, url): # packages we're going to build. self._existing = collections.defaultdict(FullVersion) if old_path: - for root, dirs, files in os.walk(old_path): + for root, dirs, files in util.walk(old_path): for filename in files: parts = filename.split('|', 1) if len(parts) == 2: @@ -413,21 +413,21 @@ def insert(self, package, version, source): # Create symbolic to the tarball for every supported version of # Mac OS X. filename = self._get_filename(package, version) - linksdir = os.path.join(self._new_path, 'links') + linksdir = self._new_path / 'links' util.make_dir(linksdir) for osx_version in self._OSX_VERSIONS: - link = os.path.join(linksdir, + link = linksdir.pathjoin( '%s-%s.%s.bottle.tar.gz' % ( package.get_homebrew_name(), version.get_homebrew_version(), osx_version)) util.remove(link) - os.symlink(os.path.join('..', filename), link) + link.symlink_to('..' / filename) # Create a formula. 
- formulaedir = os.path.join(self._new_path, 'formulae') + formulaedir = self._new_path / 'formulae' util.make_dir(formulaedir) - with open(os.path.join(formulaedir, - package.get_homebrew_name() + '.rb'), 'w') as f: + with (formulaedir.pathjoin( + package.get_homebrew_name() + '.rb')).open('w') as f: # Header. f.write("""class %(homebrew_class)s < Formula desc "%(name)s for %(arch)s" @@ -473,21 +473,21 @@ def package(self, package, version): # The package needs to be installed in /usr/local/share/ # on the Mac OS X system. In the tarball, pathnames need to be # prefixed with /. - installdir = os.path.join(config.DIR_BUILDROOT, 'install') - extractdir = os.path.join(installdir, package.get_homebrew_name(), + installdir = config.DIR_BUILDROOT / 'install' + extractdir = installdir.pathjoin(package.get_homebrew_name(), version.get_homebrew_version()) util.make_dir(extractdir) - package.extract(os.path.join(extractdir, 'share', package.get_arch()), - os.path.join('/usr/local/share', package.get_arch())) + package.extract(extractdir.pathjoin('share', package.get_arch()), + '/usr/local/share'.pathjoin(package.get_arch())) # Add a placeholder install receipt file. Homebrew depends on it # being present with at least these fields. - with open(os.path.join(extractdir, 'INSTALL_RECEIPT.json'), 'w') as f: + with (extractdir / 'INSTALL_RECEIPT.json').open('w') as f: f.write('{"used_options":[],"unused_options":[]}\n') # Archive the results. self._sanitize_permissions(installdir, directory_mode=0o755) - output = os.path.join(config.DIR_BUILDROOT, 'output.tar.gz') + output = config.DIR_BUILDROOT / 'output.tar.gz' self._run_tar([ '--options', 'gzip:!timestamp', '-czf', output, '-C', installdir, package.get_homebrew_name(), @@ -516,15 +516,15 @@ def package(self, package, version): # The package needs to be installed in /usr/pkg/ on the # NetBSD system. - installdir = os.path.join(config.DIR_BUILDROOT, 'install') + installdir = config.DIR_BUILDROOT / 'install' arch = package.get_arch() - prefix = os.path.join('/usr/pkg', arch) + prefix = '/usr/pkg' / arch package.extract(installdir, prefix) files = sorted(util.walk_files(installdir)) # Package contents list. util.make_dir(installdir) - with open(os.path.join(installdir, '+CONTENTS'), 'w') as f: + with installdir.pathjoin('+CONTENTS').open('w') as f: f.write( '@cwd /usr/pkg/%s\n' '@name %s-%s\n' % ( @@ -534,12 +534,12 @@ def package(self, package, version): for pkg in package.get_lib_depends()): f.write('@pkgdep %s-[0-9]*\n' % dep) for path in files: - f.write(os.path.relpath(path, installdir) + '\n') + f.write(path.relative_to(installdir) + '\n') # Package description. - with open(os.path.join(installdir, '+COMMENT'), 'w') as f: + with installdir.pathjoin('+COMMENT').open('w') as f: f.write('%s for %s\n' % (package.get_name(), package.get_arch())) - with open(os.path.join(installdir, '+DESC'), 'w') as f: + with installdir.pathjoin('+DESC').open('w') as f: f.write( '%(name)s for %(arch)s\n' '\n' @@ -556,7 +556,7 @@ def package(self, package, version): # system, meaning that these packages are currently only # installable on NetBSD/x86-64. Figure out a way we can create # packages that are installable on any system that uses pkgsrc. 
- with open(os.path.join(installdir, '+BUILD_INFO'), 'w') as f: + with installdir.pathjoin('+BUILD_INFO').open('w') as f: f.write( 'MACHINE_ARCH=x86_64\n' 'PKGTOOLS_VERSION=00000000\n' @@ -565,12 +565,12 @@ def package(self, package, version): ) self._sanitize_permissions(installdir) - output = os.path.join(config.DIR_BUILDROOT, 'output.tar.xz') - listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + output = config.DIR_BUILDROOT / 'output.tar.xz' + listing = config.DIR_BUILDROOT / 'listing' + with listing.open('w') as f: f.write('+CONTENTS\n+COMMENT\n+DESC\n+BUILD_INFO\n') for path in files: - f.write(os.path.relpath(path, installdir) + '\n') + f.write(path.relative_to(installdir) + '\n') self._run_tar(['-cJf', output, '-C', installdir, '-T', listing]) return output @@ -596,15 +596,15 @@ def package(self, package, version): # The package needs to be installed in /usr/local/ on the # OpenBSD system. - installdir = os.path.join(config.DIR_BUILDROOT, 'install') + installdir = config.DIR_BUILDROOT / 'install' arch = package.get_arch() - prefix = os.path.join('/usr/local', arch) + prefix = '/usr/local' / arch package.extract(installdir, prefix) files = sorted(util.walk_files(installdir)) # Package contents list. - contents = os.path.join(config.DIR_BUILDROOT, 'contents') - with open(contents, 'w') as f: + contents = config.DIR_BUILDROOT / 'contents' + with contents.open('w') as f: f.write( '@name %s-%s\n' '@cwd %s\n' % ( @@ -614,19 +614,19 @@ def package(self, package, version): written_dirs = set() for path in files: # Write entry for parent directories. - relpath = os.path.relpath(path, installdir) + relpath = path.relative_to(installdir) fullpath = '' - for component in os.path.dirname(relpath).split('/'): + for component in relpath.parent.split('/'): fullpath += component + '/' if fullpath not in written_dirs: f.write(fullpath + '\n') written_dirs.add(fullpath) - if os.path.islink(path): + if path.is_symlink(): # Write entry for symbolic link. f.write( '%s\n' - '@symlink %s\n' % (relpath, os.readlink(path))) + '@symlink %s\n' % (relpath, path.readlink())) else: # Write entry for regular file. f.write( @@ -636,11 +636,11 @@ def package(self, package, version): relpath, str(base64.b64encode( util.sha256(path).digest()), encoding='ASCII'), - os.lstat(path).st_size)) + path.lstat().st_size)) # Package description. - desc = os.path.join(config.DIR_BUILDROOT, 'desc') - with open(desc, 'w') as f: + desc = config.DIR_BUILDROOT / 'desc' + with desc.open('w') as f: f.write( '%(name)s for %(arch)s\n' '\n' @@ -655,9 +655,9 @@ def package(self, package, version): } ) - output = os.path.join(config.DIR_BUILDROOT, 'output.tar.gz') - listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + output = config.DIR_BUILDROOT / 'output.tar.gz' + listing = config.DIR_BUILDROOT / 'listing' + with listing.open('w') as f: # Leading files in tarball. f.write('#mtree\n') f.write( @@ -667,12 +667,12 @@ def package(self, package, version): '+DESC type=file mode=0666 uname=root gname=wheel time=0 contents=%s\n' % desc) for path in files: - relpath = os.path.relpath(path, installdir) - if os.path.islink(path): + relpath = path.relative_to(installdir) + if path.is_symlink(): # Symbolic links need to use 0o555 on OpenBSD. f.write( '%s type=link mode=0555 uname=root gname=wheel time=0 link=%s\n' % - (relpath, os.readlink(path))) + (relpath, path.readlink())) else: # Regular files. 
f.write( @@ -691,7 +691,7 @@ def __init__(self, old_path, new_path): self._existing = collections.defaultdict(FullVersion) if old_path: - for root, dirs, files in os.walk(old_path): + for root, dirs, files in util.walk(old_path): for filename in files: parts = filename.rsplit('-', 3) if len(parts) == 4 and parts[3] == 'any.pkg.tar.xz': @@ -717,15 +717,15 @@ def package(self, package, version): package.initialize_buildroot({'libarchive'}) log.info('PKG %s', self._get_filename(package, version)) - installdir = os.path.join(config.DIR_BUILDROOT, 'install') + installdir = config.DIR_BUILDROOT / 'install' arch = package.get_arch() - prefix = os.path.join('/usr', arch) - package.extract(os.path.join(installdir, prefix[1:]), prefix) + prefix = '/usr' / arch + package.extract(installdir / prefix[1:], prefix) files = sorted(util.walk_files(installdir)) util.make_dir(installdir) - pkginfo = os.path.join(installdir, '.PKGINFO') - with open(pkginfo, 'w') as f: + pkginfo = installdir / '.PKGINFO' + with pkginfo.open('w') as f: f.write( 'pkgname = %(archlinux_name)s\n' 'pkgdesc = %(name)s for %(arch)s\n' @@ -734,7 +734,7 @@ def package(self, package, version): 'arch = any\n' % { 'arch': package.get_arch(), 'archlinux_name': package.get_archlinux_name(), - 'flatsize': sum(os.lstat(path).st_size for path in files), + 'flatsize': sum(path.lstat().st_size for path in files), 'name': package.get_name(), 'version': version.get_archlinux_version(), } @@ -743,27 +743,27 @@ def package(self, package, version): for dep in sorted(pkg.get_archlinux_name() for pkg in package.get_lib_depends()): f.write('depend = %s\n' % dep) - output = os.path.join(config.DIR_BUILDROOT, 'output.tar.xz') - listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + output = config.DIR_BUILDROOT / 'output.tar.xz' + listing = config.DIR_BUILDROOT / 'listing' + with listing.open('w') as f: f.write('.PKGINFO\n') for path in files: - f.write(os.path.relpath(path, installdir) + '\n') + f.write(path.relative_to(installdir) + '\n') - mtree = os.path.join(installdir, '.MTREE') + mtree = installdir / '.MTREE' - with open(listing, 'w') as f: + with listing.open('w') as f: f.write('#mtree\n') f.write( '.PKGINFO type=file mode=0644 uname=root gname=root time=0 contents=%s\n' % pkginfo) f.write( '.MTREE type=file mode=0644 uname=root gname=root time=0 contents=%s\n' % mtree) for path in files: - relpath = os.path.relpath(path, installdir) - if os.path.islink(path): + relpath = path.relative_to(installdir) + if path.is_symlink(): f.write( '%s type=link mode=0777 uname=root gname=root time=0 link=%s\n' % - (relpath, os.readlink(path))) + (relpath, path.readlink())) else: f.write( '%s type=file mode=0%o uname=root gname=root time=0 contents=%s\n' % @@ -787,9 +787,9 @@ def finish(self, private_key): subprocess.check_call([ 'gpg', '--detach-sign', '--local-user', private_key, '--no-armor', '--digest-algo', 'SHA256', - os.path.join(self._new_path, package_file)]) - db_file = os.path.join(self._new_path, 'cloudabi-ports.db.tar.xz') - packages = [os.path.join(self._new_path, self._get_filename(*p)) for p in self._packages] + self._new_path / package_file]) + db_file = self._new_path / 'cloudabi-ports.db.tar.xz' + packages = [self._new_path.pathjoin(self._get_filename(*p)) for p in self._packages] # Ensure that repo-add as a valid working directory. 
os.chdir('/') subprocess.check_call(['repo-add', '-s', '-k', private_key, db_file] + packages) @@ -802,7 +802,7 @@ def __init__(self, old_path, new_path): self._existing = collections.defaultdict(FullVersion) if old_path: - for root, dirs, files in os.walk(old_path): + for root, dirs, files in util.walk(old_path): for filename in files: if filename.endswith('.tar.xz'): parts = filename[:-7].rsplit('-', 2) @@ -825,15 +825,15 @@ def package(self, package, version): package.initialize_buildroot({'libarchive'}) log.info('PKG %s', self._get_filename(package, version)) - installdir = os.path.join(config.DIR_BUILDROOT, 'install') + installdir = config.DIR_BUILDROOT / 'install' arch = package.get_arch() - prefix = os.path.join('/usr', arch) - package.extract(os.path.join(installdir, prefix[1:]), prefix) + prefix = '/usr' / arch + package.extract(installdir / prefix[1:], prefix) files = sorted(util.walk_files(installdir)) util.make_dir(installdir) - output = os.path.join(config.DIR_BUILDROOT, 'output.tar.xz') + output = config.DIR_BUILDROOT / 'output.tar.xz' self._run_tar(['-cJf', output, '-C', installdir, '.']) @@ -841,17 +841,17 @@ def package(self, package, version): def finish(self, private_key): for cygwin_arch in ('x86', 'x86_64'): - cygwin_arch_dir = os.path.join(self._new_path, cygwin_arch) + cygwin_arch_dir = self._new_path / cygwin_arch util.make_dir(cygwin_arch_dir) - setup_file = os.path.join(cygwin_arch_dir, 'setup.ini') - with open(setup_file, 'w') as f: + setup_file = cygwin_arch_dir / 'setup.ini' + with setup_file.open('w') as f: f.write('release: cygwin\n') f.write('arch: %s\n' % cygwin_arch) f.write('setup-timestamp: %d\n' % int(time.time())) for package, version in sorted(self._packages, key=lambda p:p[0].get_cygwin_name()): package_file_name = self._get_filename(package, version) - package_file = os.path.join(self._new_path, package_file_name) + package_file = self._new_path / package_file_name f.write( '\n' '@ %(cygwinname)s\n' @@ -872,7 +872,7 @@ def finish(self, private_key): ); f.write( 'install: %(filename)s %(size)s %(sha512)s\n' % { - 'size': os.lstat(package_file).st_size, + 'size': package_file.lstat().st_size, 'filename': package_file_name, 'sha512': util.sha512(package_file).hexdigest(), } @@ -896,20 +896,20 @@ def _get_filename(package, version): @staticmethod def _file_linkto(filename): try: - return os.readlink(filename) + return filename.readlink() except OSError: return '' @staticmethod def _file_md5(filename): - if os.path.islink(filename): + if filename.is_symlink(): return '' else: return util.md5(filename).hexdigest() @staticmethod def _file_mode(filename): - mode = os.lstat(filename).st_mode + mode = filename.lstat().st_mode if stat.S_ISLNK(mode): # Symbolic links. return 0o120777 - 65536 @@ -922,7 +922,7 @@ def _file_mode(filename): @staticmethod def _file_size(filename): - sb = os.lstat(filename) + sb = filename.lstat() if stat.S_ISREG(sb.st_mode): return sb.st_size return 0 @@ -938,27 +938,27 @@ def package(self, package, version): # The package needs to be installed in /usr/arch> on the Red Hat # system. - installdir = os.path.join(config.DIR_BUILDROOT, 'install') + installdir = config.DIR_BUILDROOT / 'install' arch = package.get_arch() - prefix = os.path.join('/usr', arch) + prefix = '/usr' / arch package.extract(installdir, prefix) files = sorted(util.walk_files(installdir)) # Create an xz compressed cpio payload containing all files. 
- listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + listing = config.DIR_BUILDROOT / 'listing' + with listing.open('w') as f: f.write('#mtree\n') for path in files: - relpath = os.path.join(prefix, os.path.relpath(path, installdir)) - if os.path.islink(path): + relpath = prefix.pathjoin(path.relative_to(installdir)) + if path.is_symlink(): f.write( '%s type=link mode=0777 uname=root gname=root time=0 link=%s\n' % - (relpath, os.readlink(path))) + (relpath, path.readlink())) else: f.write( '%s type=file mode=0%o uname=root gname=root time=0 contents=%s\n' % (relpath, self._get_suggested_mode(path), path)) - data = os.path.join(config.DIR_BUILDROOT, 'data.cpio.xz') + data = config.DIR_BUILDROOT / 'data.cpio.xz' self._run_tar([ '-cJf', data, '--format=newc', '-C', installdir, '@' + listing, ]) @@ -969,7 +969,7 @@ def package(self, package, version): name = package.get_redhat_name() lib_depends = sorted(dep.get_redhat_name() for dep in package.get_lib_depends()) - dirs = sorted({os.path.dirname(f) for f in files}) + dirs = sorted({f.parent for f in files}) header = bytes(rpm.Header({ 100: rpm.StringArray(['C']), 1000: rpm.String(name), @@ -984,7 +984,7 @@ def package(self, package, version): 1020: rpm.String(package.get_homepage()), 1021: rpm.String('linux'), 1022: rpm.String('noarch'), - 1028: rpm.Int32(os.lstat(f).st_size for f in files), + 1028: rpm.Int32(f.lstat().st_size for f in files), 1030: rpm.Int16(self._file_mode(f) for f in files), 1033: rpm.Int16(0 for f in files), 1034: rpm.Int32(0 for f in files), @@ -1002,10 +1002,10 @@ def package(self, package, version): 1097: rpm.StringArray('' for f in files), 1112: rpm.Int32(8 for dep in lib_depends), 1113: rpm.StringArray([version.get_redhat_version()]), - 1116: rpm.Int32(dirs.index(os.path.dirname(f)) for f in files), - 1117: rpm.StringArray(os.path.basename(f) for f in files), - 1118: rpm.StringArray(os.path.join(prefix, - os.path.relpath(d, installdir)) + + 1116: rpm.Int32(dirs.index(f.parent) for f in files), + 1117: rpm.StringArray(f.name for f in files), + 1118: rpm.StringArray(prefix.pathjoin( + d.relative_to(installdir)) + '/' for d in dirs), 1124: rpm.String('cpio'), @@ -1018,13 +1018,13 @@ def package(self, package, version): checksum.update(header) util.hash_file(data, checksum) signature = bytes(rpm.Header({ - 1000: rpm.Int32([len(header) + os.stat(data).st_size]), + 1000: rpm.Int32([len(header) + data.stat().st_size]), 1004: rpm.Bin(checksum.digest()), })) # Create the RPM file. - output = os.path.join(config.DIR_BUILDROOT, 'output.rpm') - with open(output, 'wb') as f: + output = config.DIR_BUILDROOT / 'output.rpm' + with output.open('wb') as f: # The lead. f.write(b'\xed\xab\xee\xdb\x03\x00\x00\x00\x00\x00') fullname = '%s-%s' % (name, version.get_redhat_version()) @@ -1040,7 +1040,7 @@ def package(self, package, version): f.write(header) # The payload. 
- with open(data, 'rb') as fin: + with data.open('rb') as fin: shutil.copyfileobj(fin, f) return output @@ -1048,5 +1048,5 @@ def finish(self, private_key): subprocess.check_call(['createrepo', self._new_path]) subprocess.check_call([ 'gpg', '--detach-sign', '--local-user', private_key, - '--armor', os.path.join(self._new_path, 'repodata/repomd.xml'), + '--armor', self._new_path / 'repodata/repomd.xml', ]) diff --git a/src/catalog_set.py b/src/catalog_set.py index 3361ae6..8fa9c69 100644 --- a/src/catalog_set.py +++ b/src/catalog_set.py @@ -33,8 +33,8 @@ def _build_at_version(self, package, version, tmpdir): return False else: # A new package. Keep it. - new = os.path.join(tmpdir, str(len(do_rebuild))) - os.rename(path, new) + new = tmpdir.pathjoin(str(len(do_rebuild))) + path.rename(new) do_rebuild.append(catalog) # Round 2: Do a rebuild to ensure that the build is @@ -43,7 +43,7 @@ def _build_at_version(self, package, version, tmpdir): if do_rebuild: package.clean() for idx, catalog in enumerate(do_rebuild): - path1 = os.path.join(tmpdir, str(idx)) + path1 = tmpdir.pathjoin(str(idx)) path2 = catalog.package(package, version) if not util.file_contents_equal(path1, path2): raise Exception( diff --git a/src/distfile.py b/src/distfile.py index 2710760..05d359b 100644 --- a/src/distfile.py +++ b/src/distfile.py @@ -20,7 +20,7 @@ class Distfile: def __init__(self, distdir, name, checksum, master_sites, patches, unsafe_string_sources): for patch in patches: - if not os.path.isfile(patch): + if not patch.is_file(): raise Exception('Patch %s does not exist' % patch) self._distdir = distdir @@ -28,12 +28,12 @@ def __init__(self, distdir, name, checksum, master_sites, patches, self._checksum = checksum self._patches = patches self._unsafe_string_sources = unsafe_string_sources - self._pathname = os.path.join(distdir, self._name) + self._pathname = distdir / self._name # Compute distfile URLs based on the provided list of sites. # Also add fallback URLs in case the master sites are down. self._urls = { - site + os.path.basename(self._name) for site in master_sites + site + self._name.name for site in master_sites } | { site + self._name for site in config.FALLBACK_MIRRORS } @@ -43,12 +43,12 @@ def _apply_patch(patch, target): # Automatically determine the patchlevel by taking a look at the # first filename in the patch. patchlevel = 0 - with open(patch, 'rb') as f: + with patch.open('rb') as f: for l in f.readlines(): if l.startswith(b'--- '): filename = str(l[4:-1].split(b'\t', 1)[0], encoding='ASCII') while True: - if os.path.exists(os.path.join(target, filename)): + if (target / filename).exists(): # Correct patchlevel determined. break # Increment patchlevel once more. @@ -63,31 +63,31 @@ def _apply_patch(patch, target): break # Apply the patch. - with open(patch) as f: + with patch.open() as f: subprocess.check_call( ['patch', '-d', target, '-tsp%d' % patchlevel], stdin=f) # Delete .orig files that patch leaves behind. for path in util.walk_files(target): if path.endswith('.orig'): - os.unlink(path) + path.unlink() def _extract_unpatched(self, target): # Fetch and extract tarball. self._fetch() - tar = os.path.join(config.DIR_BUILDROOT, 'bin/bsdtar') - if not os.path.exists(tar): + tar = config.DIR_BUILDROOT / 'bin/bsdtar' + if not tar.exists(): tar = 'tar' util.make_dir(target) subprocess.check_call([tar, '-xC', target, '-f', self._pathname]) # Remove leading directory names. 
while True: - entries = os.listdir(target) + entries = list(target.iterdir()) if len(entries) != 1: return target - subdir = os.path.join(target, entries[0]) - if not os.path.isdir(subdir): + subdir = target / entries[0] + if not subdir.is_dir(): return target target = subdir @@ -105,7 +105,7 @@ def _fetch(self): log.info('FETCH %s', url) try: util.make_parent_dir(self._pathname) - with util.unsafe_fetch(url) as fin, open(self._pathname, 'wb') as fout: + with util.unsafe_fetch(url) as fin, self._pathname.open('wb') as fout: shutil.copyfileobj(fin, fout) except ConnectionResetError as e: log.warning(e) @@ -120,12 +120,12 @@ def extract(self, target): self._apply_patch(patch, target) # Add markers to sources that depend on unsafe string sources. for filename in self._unsafe_string_sources: - path = os.path.join(target, filename) - with open(path, 'rb') as fin, open(path + '.new', 'wb') as fout: + path = target / filename + with path.open('rb') as fin, (path + '.new').open('wb') as fout: fout.write(bytes('#define _CLOUDLIBC_UNSAFE_STRING_FUNCTIONS\n', encoding='ASCII')) fout.write(fin.read()) - os.rename(path + '.new', path) + (path + '.new').rename(path) return target def fixup_patches(self, tmpdir): @@ -133,15 +133,15 @@ def fixup_patches(self, tmpdir): return # Extract one copy of the code to diff against. util.remove(tmpdir) - orig_dir = self._extract_unpatched(os.path.join(tmpdir, 'orig')) + orig_dir = self._extract_unpatched(tmpdir / 'orig') for path in util.walk_files(orig_dir): if path.endswith('.orig'): - os.unlink(path) + path.unlink() for patch in sorted(self._patches): log.info('FIXUP %s', patch) # Apply individual patches to the code. - patched_dir = os.path.join(tmpdir, 'patched') + patched_dir = tmpdir / 'patched' util.remove(patched_dir) patched_dir = self._extract_unpatched(patched_dir) self._apply_patch(patch, patched_dir) diff --git a/src/package.py b/src/package.py index 033c675..300b692 100644 --- a/src/package.py +++ b/src/package.py @@ -52,7 +52,7 @@ def _initialize_buildroot(self): def build(self): # Skip this package if it has been built already. - if os.path.isdir(self._install_directory): + if self._install_directory.is_dir(): return # Perform the build inside an empty buildroot. @@ -100,7 +100,7 @@ def __str__(self): def build(self): # Skip this package if it has been built already. - if not self._build_cmd or os.path.isdir(self._install_directory): + if not self._build_cmd or self._install_directory.is_dir(): return # Perform the build inside a buildroot with its dependencies @@ -129,10 +129,10 @@ def extract(self, path, expandpath): if target_file.endswith('.template'): # File is a template. Expand %%PREFIX%% tags. target_file = target_file[:-9] - with open(source_file, 'r') as f: + with source_file.open('r') as f: contents = f.read() contents = contents.replace('%%PREFIX%%', expandpath) - with open(target_file, 'w') as f: + with target_file.open('w') as f: f.write(contents) shutil.copymode(source_file, target_file) else: @@ -198,6 +198,6 @@ def initialize_buildroot(self, host_depends, lib_depends=set()): util.remove_and_make_dir(config.DIR_BUILDROOT) for dep in host_deps: dep.extract() - prefix = os.path.join(config.DIR_BUILDROOT, self._arch) + prefix = config.DIR_BUILDROOT / self._arch for dep in lib_depends: dep.extract(prefix, prefix) diff --git a/src/repository.py b/src/repository.py index 8bdae71..351a053 100644 --- a/src/repository.py +++ b/src/repository.py @@ -43,16 +43,16 @@ def op_distfile(**kwargs): break # Automatically add patches if none are given. 
- dirname = os.path.dirname(path) + dirname = path.parent if 'patches' not in distfile: distfile['patches'] = (name[6:] - for name in os.listdir(dirname) + for name in list(dirname.iterdir()) if name.startswith('patch-')) if 'unsafe_string_sources' not in distfile: distfile['unsafe_string_sources'] = frozenset() # Turn patch filenames into full paths. - distfile['patches'] = {os.path.join(dirname, 'patch-' + patch) + distfile['patches'] = {dirname.pathjoin('patch-' + patch) for patch in distfile['patches']} if name in self._distfiles: @@ -64,7 +64,7 @@ def op_distfile(**kwargs): def op_host_package(**kwargs): package = kwargs - package['resource_directory'] = os.path.dirname(path) + package['resource_directory'] = path.parent name = package['name'] if name in self._deferred_host_packages: raise Exception('%s is redeclaring packages %s' % (path, name)) @@ -72,7 +72,7 @@ def op_host_package(**kwargs): def op_package(**kwargs): package = kwargs - package['resource_directory'] = os.path.dirname(path) + package['resource_directory'] = path.parent name = package['name'] for arch in config.ARCHITECTURES: if (name, arch) in self._deferred_target_packages: @@ -115,7 +115,7 @@ def op_sites_sourceforge(suffix): 'sites_sourceforge': op_sites_sourceforge, } - with open(path, 'r') as f: + with path.open('r') as f: exec(f.read(), identifiers, identifiers) def get_distfiles(self): @@ -141,8 +141,8 @@ def get_host_package(name): del package['lib_depends'] package['version'] = SimpleVersion(package['version']) self._host_packages[name] = HostPackage( - install_directory=os.path.join( - self._install_directory, + install_directory=( + self._install_directory).pathjoin( 'host', name), distfiles=self._distfiles, @@ -172,8 +172,8 @@ def get_target_package(name, arch): del package['lib_depends'] package['version'] = SimpleVersion(package['version']) self._target_packages[(name, arch)] = TargetPackage( - install_directory=os.path.join( - self._install_directory, arch, name), + install_directory=( + self._install_directory) / arch / name, arch=arch, distfiles=self._distfiles, host_packages=self._host_packages, @@ -192,8 +192,7 @@ def get_target_package(name, arch): packages = self._target_packages.copy() for arch in config.ARCHITECTURES: packages[('everything', arch)] = TargetPackage( - install_directory=os.path.join(self._install_directory, arch, - 'everything'), + install_directory=self._install_directory / arch / 'everything', arch=arch, name='everything', version=SimpleVersion('1.0'), From 676dc708af19b4a83f1762cd0752ab0928deba46 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 01:08:56 -0500 Subject: [PATCH 02/28] src.util static type hints in preparation for ocap refactor --- src/util.py | 43 +++++++++++++++++++++++++------------------ 1 file changed, 25 insertions(+), 18 deletions(-) diff --git a/src/util.py b/src/util.py index 42732c1..e852889 100644 --- a/src/util.py +++ b/src/util.py @@ -3,6 +3,7 @@ # This file is distributed under a 2-clause BSD license. # See the LICENSE file for details. 
+from typing import Union, Iterator, Tuple import gzip import hashlib import os @@ -12,7 +13,7 @@ import urllib.request -def copy_file(source, target, preserve_attributes): +def copy_file(source: str, target: str, preserve_attributes: bool): if os.path.exists(target): raise Exception('About to overwrite %s with %s' % (source, target)) if os.path.islink(source): @@ -33,7 +34,7 @@ def copy_file(source, target, preserve_attributes): raise Exception(source + ' is of an unsupported type') -def diff(orig_dir, patched_dir, patch): +def diff(orig_dir: str, patched_dir: str, patch: str): proc = subprocess.Popen(['diff', '-urN', orig_dir, patched_dir], stdout=subprocess.PIPE) minline = bytes('--- %s/' % orig_dir, encoding='ASCII') @@ -56,7 +57,8 @@ def diff(orig_dir, patched_dir, patch): else: f.write(l) -def file_contents_equal(path1, path2): + +def file_contents_equal(path1: str, path2: str) -> bool: # Compare file contents. with open(path1, 'rb') as f1, open(path2, 'rb') as f2: while True: @@ -68,12 +70,16 @@ def file_contents_equal(path1, path2): return True -def gzip_file(source, target): +def gzip_file(source: str, target: str): with open(source, 'rb') as f1, gzip.GzipFile(target, 'wb', mtime=0) as f2: - shutil.copyfileobj(f1, f2) + shutil.copyfileobj(f1, f2) # type: ignore + + +# So says the standard python 3.4 stubs +_UrlopenRet = Union[urllib.request.HTTPResponse, urllib.request.addinfourl] -def unsafe_fetch(url): +def unsafe_fetch(url: str) -> _UrlopenRet: # Fetch a file over HTTP, HTTPS or FTP. For HTTPS, we don't do any # certificate checking. The caller should validate the authenticity # of the result. @@ -88,7 +94,7 @@ def unsafe_fetch(url): return urllib.request.urlopen(url) -def lchmod(path, mode): +def lchmod(path: str, mode: int): try: os.lchmod(path, mode) except AttributeError: @@ -96,18 +102,18 @@ def lchmod(path, mode): os.chmod(path, mode) -def make_dir(path): +def make_dir(path: str): try: os.makedirs(path) except FileExistsError: pass -def make_parent_dir(path): +def make_parent_dir(path: str): make_dir(os.path.dirname(path)) -def _remove(path): +def _remove(path: str): try: shutil.rmtree(path) except FileNotFoundError: @@ -116,7 +122,7 @@ def _remove(path): os.unlink(path) -def remove(path): +def remove(path: str): try: # First try to remove the file or directory directly. _remove(path) @@ -128,7 +134,7 @@ def remove(path): _remove(path) -def remove_and_make_dir(path): +def remove_and_make_dir(path: str): try: remove(path) except FileNotFoundError: @@ -136,7 +142,7 @@ def remove_and_make_dir(path): make_dir(path) -def hash_file(path, checksum): +def hash_file(path: str, checksum: hashlib.Hash): if os.path.islink(path): checksum.update(bytes(os.readlink(path), encoding='ASCII')) else: @@ -148,25 +154,25 @@ def hash_file(path, checksum): checksum.update(data) -def sha256(path): +def sha256(path: str) -> hashlib.Hash: checksum = hashlib.sha256() hash_file(path, checksum) return checksum -def sha512(path): +def sha512(path) -> hashlib.Hash: checksum = hashlib.sha512() hash_file(path, checksum) return checksum -def md5(path): +def md5(path) -> hashlib.Hash: checksum = hashlib.md5() hash_file(path, checksum) return checksum -def walk_files(path): +def walk_files(path: str) -> Iterator[str]: if os.path.isdir(path): for root, dirs, files in os.walk(path): # Return all files. 
@@ -181,7 +187,8 @@ def walk_files(path): yield path -def walk_files_concurrently(source, target): +def walk_files_concurrently(source: str, + target: str) -> Iterator[Tuple[str, str]]: for source_filename in walk_files(source): target_filename = os.path.normpath( os.path.join(target, os.path.relpath(source_filename, source))) From 7e0c2d9bb055c41bb49336ef39e0a2a4078a4b15 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 01:59:15 -0500 Subject: [PATCH 03/28] define pathlib.Path subclass with shutil methods --- src/util.py | 62 ++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 61 insertions(+), 1 deletion(-) diff --git a/src/util.py b/src/util.py index e852889..8031fb1 100644 --- a/src/util.py +++ b/src/util.py @@ -3,7 +3,9 @@ # This file is distributed under a 2-clause BSD license. # See the LICENSE file for details. -from typing import Union, Iterator, Tuple +# Use the Path type only; the constructor is ambient authority +from pathlib import Path as PathT, PurePath, PurePosixPath +from typing import AnyStr, Generic, Iterator, Tuple, Type, TypeVar, Union import gzip import hashlib import os @@ -12,6 +14,64 @@ import ssl import urllib.request +Self = TypeVar('Self') + + +class PathExt(Generic[Self], PathT): + # fix lack of parameter in PurePath type decl + # ref https://github.com/python/typeshed/issues/553 + def with_name(self, name: str) -> Self: # type: ignore + raise NotImplementedError + + def __add__(self, suffix: str) -> Self: + raise NotImplementedError + + def copy(self, target: PathExt): + raise NotImplementedError + + def copystat(self, target: PathExt): + raise NotImplementedError + + def copymode(self, target: PathExt): + raise NotImplementedError + + def rmtree(self): + raise NotImplementedError + + def readlink(self) -> AnyStr: + raise NotImplementedError + + def link(self, dst: PathExt): + raise NotImplementedError + + +def mix_shutil_path(concrete: Type[PurePosixPath], + shutil, os_link) -> Type[PathExt]: + class PathWithShUtil(concrete, PathExt): # type: ignore + def __add__(self, suffix: str) -> PathWithShUtil: + return self.with_name(self.name + suffix) + + def copy(self, target: PathExt): + shutil.copy(str(self), str(target)) + + def copystat(self, target: PathExt): + shutil.copystat(str(self), str(target)) + + def copymode(self, target: PathExt): + shutil.copymode(str(self), str(target)) + + def rmtree(self): + shutil.rmtree(str(self)) + + def readlink(self): + # KLUDGE: peek into undocumented pathlib API + return self._accessor.readlink(str(self)) + + def link(self, dst: PathExt): + os_link(str(self), str(dst)) + + return PathWithShUtil + def copy_file(source: str, target: str, preserve_attributes: bool): if os.path.exists(target): From a7e8cc073f97066802316270e1a95eb5f51213d5 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 02:34:16 -0500 Subject: [PATCH 04/28] explicit Path authority in util.copy_file, etc. --- src/util.py | 137 +++++++++++++++++++++++++++++----------------------- 1 file changed, 77 insertions(+), 60 deletions(-) diff --git a/src/util.py b/src/util.py index 8031fb1..d2e9e88 100644 --- a/src/util.py +++ b/src/util.py @@ -3,13 +3,13 @@ # This file is distributed under a 2-clause BSD license. # See the LICENSE file for details. 
+from itertools import filterfalse, tee # Use the Path type only; the constructor is ambient authority from pathlib import Path as PathT, PurePath, PurePosixPath +from shutil import copyfileobj from typing import AnyStr, Generic, Iterator, Tuple, Type, TypeVar, Union import gzip import hashlib -import os -import shutil import subprocess import ssl import urllib.request @@ -17,12 +17,26 @@ Self = TypeVar('Self') -class PathExt(Generic[Self], PathT): +class PathTFix(Generic[Self], PathT): # fix lack of parameter in PurePath type decl # ref https://github.com/python/typeshed/issues/553 + def iterdir(self) -> Iterator[Self]: # type: ignore + raise NotImplementedError + + def relative_to(self, *other: str) -> Self: # type: ignore + raise NotImplementedError + + def resolve(self) -> Self: # type: ignore + raise NotImplementedError + def with_name(self, name: str) -> Self: # type: ignore raise NotImplementedError + parent = None # type: Self + + +class PathExt(PathTFix): + def __add__(self, suffix: str) -> Self: raise NotImplementedError @@ -73,22 +87,22 @@ def link(self, dst: PathExt): return PathWithShUtil -def copy_file(source: str, target: str, preserve_attributes: bool): - if os.path.exists(target): +def copy_file(source: PathExt, target: PathExt, preserve_attributes: bool): + if target.exists(): raise Exception('About to overwrite %s with %s' % (source, target)) - if os.path.islink(source): + if source.is_symlink(): # Preserve symbolic links. - destination = os.readlink(source) - if os.path.isabs(destination): + destination = source.readlink() + if PurePosixPath(destination).is_absolute(): raise Exception( '%s points to absolute location %s', source, destination) - os.symlink(destination, target) - elif os.path.isfile(source): + target.symlink_to(destination) + elif source.is_file(): # Copy regular files. - shutil.copy(source, target) + source.copy(target) if preserve_attributes: - shutil.copystat(source, target) + source.copystat(target) else: # Bail out on anything else. raise Exception(source + ' is of an unsupported type') @@ -118,9 +132,9 @@ def diff(orig_dir: str, patched_dir: str, patch: str): f.write(l) -def file_contents_equal(path1: str, path2: str) -> bool: +def file_contents_equal(path1: PathT, path2: PathT) -> bool: # Compare file contents. 
- with open(path1, 'rb') as f1, open(path2, 'rb') as f2: + with path1.open('rb') as f1, path2.open('rb') as f2: while True: b1 = f1.read(16384) b2 = f2.read(16384) @@ -130,10 +144,10 @@ def file_contents_equal(path1: str, path2: str) -> bool: return True -def gzip_file(source: str, target: str): - with open(source, 'rb') as f1, gzip.GzipFile(target, 'wb', mtime=0) as f2: - shutil.copyfileobj(f1, f2) # type: ignore - +def gzip_file(source: PathT, target: PathT): + with source.open('rb') as f1, target.open('wb') as ft, gzip.GzipFile( + fileobj=ft, mode='wb', mtime=0) as f2: + copyfileobj(f1, f2) # type: ignore slight mismatch with GzipFile # So says the standard python 3.4 stubs @@ -154,47 +168,43 @@ def unsafe_fetch(url: str) -> _UrlopenRet: return urllib.request.urlopen(url) -def lchmod(path: str, mode: int): - try: - os.lchmod(path, mode) - except AttributeError: - if not os.path.islink(path): - os.chmod(path, mode) +def lchmod(path: PathT, mode: int): + path.lchmod(mode) -def make_dir(path: str): +def make_dir(path: PathT): try: - os.makedirs(path) + path.mkdir(parents=True) except FileExistsError: pass -def make_parent_dir(path: str): - make_dir(os.path.dirname(path)) +def make_parent_dir(path: PathExt): + make_dir(path.parent) -def _remove(path: str): +def _remove(path: PathExt): try: - shutil.rmtree(path) + path.rmtree() except FileNotFoundError: pass except (NotADirectoryError, OSError): - os.unlink(path) + path.unlink() -def remove(path: str): +def remove(path: PathExt): try: # First try to remove the file or directory directly. _remove(path) except PermissionError: # If that fails, add write permissions to the directories stored # inside and retry. - for root, dirs, files in os.walk(path): - os.chmod(root, 0o755) + for root, dirs, files in walk(path): + root.chmod(0o755) _remove(path) -def remove_and_make_dir(path: str): +def remove_and_make_dir(path: PathExt): try: remove(path) except FileNotFoundError: @@ -202,11 +212,11 @@ def remove_and_make_dir(path: str): make_dir(path) -def hash_file(path: str, checksum: hashlib.Hash): - if os.path.islink(path): - checksum.update(bytes(os.readlink(path), encoding='ASCII')) +def hash_file(path: PathExt, checksum: hashlib.Hash): + if path.is_symlink(): + checksum.update(bytes(path.readlink(), encoding='ASCII')) else: - with open(path, 'rb') as f: + with path.open('rb') as f: while True: data = f.read(16384) if not data: @@ -214,42 +224,49 @@ def hash_file(path: str, checksum: hashlib.Hash): checksum.update(data) -def sha256(path: str) -> hashlib.Hash: +def sha256(path: PathExt) -> hashlib.Hash: checksum = hashlib.sha256() hash_file(path, checksum) return checksum -def sha512(path) -> hashlib.Hash: +def sha512(path: PathExt) -> hashlib.Hash: checksum = hashlib.sha512() hash_file(path, checksum) return checksum -def md5(path) -> hashlib.Hash: +def md5(path: PathExt) -> hashlib.Hash: checksum = hashlib.md5() hash_file(path, checksum) return checksum -def walk_files(path: str) -> Iterator[str]: - if os.path.isdir(path): - for root, dirs, files in os.walk(path): - # Return all files. - for f in files: - yield os.path.join(root, f) - # Return all symbolic links to directories as well. - for f in dirs: - fullpath = os.path.join(root, f) - if os.path.islink(fullpath): - yield fullpath - elif os.path.exists(path): +def walk_files(path: PathExt) -> Iterator[PathExt]: + if path.is_dir(): + for sub in path.iterdir(): + yield from walk_files(sub) + # Return all symbolic links to directories as well. 
+ if path.is_symlink(): + yield path.resolve() + elif path.exists(): yield path -def walk_files_concurrently(source: str, - target: str) -> Iterator[Tuple[str, str]]: - for source_filename in walk_files(source): - target_filename = os.path.normpath( - os.path.join(target, os.path.relpath(source_filename, source))) - yield source_filename, target_filename +def walk(path: PathExt): + def is_dir(p): + return p.is_dir() + if path.is_dir(): + root = path + dirs, files = tee(root.iterdir()) + dirs = list(filter(is_dir, dirs)) + files = list(filterfalse(is_dir, files)) + yield root, dirs, files + for subdir in dirs: + yield from walk(subdir) + + +def walk_files_concurrently(source: PathExt, target: PathExt) -> Iterator[Tuple[PathExt, PathExt]]: + for source_file in walk_files(source): + target_file = (target / source_file.relative_to(str(source))).resolve() + yield source_file, target_file From 49eb9eb9c4d9f7d80448e572dbe3eac19008304e Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 02:59:48 -0500 Subject: [PATCH 05/28] pass web access to unsafe_fetch explicitly --- src/util.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/src/util.py b/src/util.py index d2e9e88..ccd9f52 100644 --- a/src/util.py +++ b/src/util.py @@ -7,12 +7,13 @@ # Use the Path type only; the constructor is ambient authority from pathlib import Path as PathT, PurePath, PurePosixPath from shutil import copyfileobj -from typing import AnyStr, Generic, Iterator, Tuple, Type, TypeVar, Union +from typing import (AnyStr, Callable, Generic, Iterator, + Tuple, Type, TypeVar, Union) +from urllib.request import HTTPResponse, addinfourl import gzip import hashlib import subprocess import ssl -import urllib.request Self = TypeVar('Self') @@ -151,9 +152,11 @@ def gzip_file(source: PathT, target: PathT): # So says the standard python 3.4 stubs -_UrlopenRet = Union[urllib.request.HTTPResponse, urllib.request.addinfourl] +_UrlopenRet = Union[HTTPResponse, addinfourl] +_UrlopenFn = Callable[..., _UrlopenRet] -def unsafe_fetch(url: str) -> _UrlopenRet: + +def unsafe_fetch(url: str, urlopen: _UrlopenFn) -> _UrlopenRet: # Fetch a file over HTTP, HTTPS or FTP. For HTTPS, we don't do any # certificate checking. The caller should validate the authenticity # of the result. @@ -162,10 +165,10 @@ def unsafe_fetch(url: str) -> _UrlopenRet: ctx = ssl.create_default_context() ctx.check_hostname = False ctx.verify_mode = ssl.CERT_NONE - return urllib.request.urlopen(url, context=ctx) + return urlopen(url, context=ctx) except TypeError: # Python < 3.4.3. 
- return urllib.request.urlopen(url) + return urlopen(url) def lchmod(path: PathT, mode: int): From dafbb5d75b7a9af47ca1f3db320e65e58ccd43a8 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 03:04:48 -0500 Subject: [PATCH 06/28] pass Popen authority to patch() explicitly --- src/util.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/util.py b/src/util.py index ccd9f52..8f96f8f 100644 --- a/src/util.py +++ b/src/util.py @@ -7,12 +7,12 @@ # Use the Path type only; the constructor is ambient authority from pathlib import Path as PathT, PurePath, PurePosixPath from shutil import copyfileobj +from subprocess import PIPE from typing import (AnyStr, Callable, Generic, Iterator, Tuple, Type, TypeVar, Union) from urllib.request import HTTPResponse, addinfourl import gzip import hashlib -import subprocess import ssl Self = TypeVar('Self') @@ -109,12 +109,13 @@ def copy_file(source: PathExt, target: PathExt, preserve_attributes: bool): raise Exception(source + ' is of an unsupported type') -def diff(orig_dir: str, patched_dir: str, patch: str): - proc = subprocess.Popen(['diff', '-urN', orig_dir, patched_dir], - stdout=subprocess.PIPE) +def diff(orig_dir: PathExt, patched_dir: PathExt, patch: PathExt, + Popen): + proc = Popen(['diff', '-urN', str(orig_dir), str(patched_dir)], + stdout=PIPE) minline = bytes('--- %s/' % orig_dir, encoding='ASCII') plusline = bytes('+++ %s/' % patched_dir, encoding='ASCII') - with open(patch, 'wb') as f: + with patch.open('wb') as f: for l in proc.stdout.readlines(): if l.startswith(b'diff '): # Omit lines that start with 'diff'. They serve From be5b21297661701b3249ba43d43cb2166e982408 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 10:43:05 -0500 Subject: [PATCH 07/28] src/rpm: conventional (PEP8) whitespace style - 4 space indentation - 79 characater line length --- src/rpm.py | 35 ++++++++++++++++++++++------------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/src/rpm.py b/src/rpm.py index 5f64a1e..86df305 100644 --- a/src/rpm.py +++ b/src/rpm.py @@ -5,6 +5,7 @@ import struct + class Header: """Class for generating binary RPM headers.""" @@ -22,7 +23,9 @@ def __bytes__(self): align = value.alignment() values += b'\0' * (((align - len(values)) % align) % align) # Create index entry. - indices += struct.pack('>iiii', tag, value.type(), len(values), value.count()) + indices += struct.pack('>iiii', + tag, value.type(), + len(values), value.count()) # Append the entry's value. 
values += value.encode() @@ -31,6 +34,7 @@ def __bytes__(self): struct.pack('>ii', len(indices) // 16, len(values)) + indices + values) + class Int16: """List of 16 bit signed integers.""" @@ -39,7 +43,7 @@ def __init__(self, values): @staticmethod def alignment(): - return 2 + return 2 def count(self): return len(self._values) @@ -49,7 +53,8 @@ def encode(self): @staticmethod def type(): - return 3 + return 3 + class Int32: """List of 32 bit signed integers.""" @@ -59,7 +64,7 @@ def __init__(self, values): @staticmethod def alignment(): - return 4 + return 4 def count(self): return len(self._values) @@ -69,7 +74,8 @@ def encode(self): @staticmethod def type(): - return 4 + return 4 + class String: """Single C string.""" @@ -79,7 +85,7 @@ def __init__(self, value): @staticmethod def alignment(): - return 1 + return 1 @staticmethod def count(): @@ -90,7 +96,8 @@ def encode(self): @staticmethod def type(): - return 6 + return 6 + class Bin: """Binary blob.""" @@ -100,7 +107,7 @@ def __init__(self, value): @staticmethod def alignment(): - return 1 + return 1 def count(self): return len(self._value) @@ -110,7 +117,8 @@ def encode(self): @staticmethod def type(): - return 7 + return 7 + class StringArray: """Sequence of C strings.""" @@ -120,7 +128,7 @@ def __init__(self, values): @staticmethod def alignment(): - return 1 + return 1 def count(self): return len(self._values) @@ -131,7 +139,8 @@ def encode(self): @staticmethod def type(): - return 8 + return 8 + class I18NString: """Sequence of strings stored in a native character set. @@ -143,7 +152,7 @@ def __init__(self, value): @staticmethod def alignment(): - return 1 + return 1 @staticmethod def count(): @@ -154,4 +163,4 @@ def encode(self): @staticmethod def type(): - return 9 + return 9 From b657d1355a51a90c58b0ab9552650f01679343ea Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 10:48:30 -0500 Subject: [PATCH 08/28] use pkgutil.get_data for config resources While get_data(), like import, is actually runtime I/O, we consider access to design-time constants as if it were access to any other static data (string, integer, ...). --- src/config.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/config.py b/src/config.py index 6e146ae..40b7eac 100644 --- a/src/config.py +++ b/src/config.py @@ -3,8 +3,8 @@ # This file is distributed under a 2-clause BSD license. # See the LICENSE file for details. +from pkgutil import get_data import platform -import os # Architectures for which we can build packages. ARCHITECTURES = { @@ -19,8 +19,9 @@ # build directory. Debug symbols and __FILE__ use absolute paths. DIR_BUILDROOT = '/usr/obj/cloudabi-ports' -# Location where resource files are stored. -DIR_RESOURCES = os.path.join(os.getcwd(), 'misc') +# Resource files. +RESOURCES = dict((name, get_data('misc', name)) + for name in ['config.guess', 'config.sub']) # Location at which distfiles can be fetched in case the master sites # are down. 
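A minimal consumer sketch for the RESOURCES table introduced above (hypothetical helper name `write_resources`; assumes `target_dir` is a PathExt-style directory and the `misc` package data is importable). It only restates what later builder changes do with `config.RESOURCES[filename]`:

    from . import config

    def write_resources(target_dir):
        # config.RESOURCES maps resource names such as 'config.guess' to the
        # bytes that pkgutil.get_data('misc', name) returned at import time.
        for name, data in config.RESOURCES.items():
            with (target_dir / name).open('wb') as fout:
                fout.write(data)
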
From ed67148c13992d204dd2e9b87a1cae32f71642db Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 12:37:27 -0500 Subject: [PATCH 09/28] make the src dir into a package (helps with mypy) --- src/__init__.py | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 src/__init__.py diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000..383b407 --- /dev/null +++ b/src/__init__.py @@ -0,0 +1,3 @@ +# "The __init__.py files are required to make Python treat the +# directories as containing packages" +# -- https://docs.python.org/3/tutorial/modules.html From 93dcccdac16a7877097789bdb52bbd86adb637f2 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 12:40:35 -0500 Subject: [PATCH 10/28] distfile: use object capability discipline - no powerful imports: os, subprocess, ... - copyfileobj is powerless: file access is passed in - PurePosixPath for syntax manipulation is also powerless - use PathExt rather than strings for access to files - allfiles KLUDGE: navigation thru absolute paths - PathTFix.__truediv__ Self type work-around (for p / sub) - cast .iterdir() result because our Self type work-around isn't smart enough - open() -> path.open() - pass urlopen, subprocess, random to Distfile as a distfile.Access - simplify, export UrlopenFn: always returns BinaryIO - util.RunCommand class represents subprocess interface - declare type for util.diff mkPopen arg - Distfile.__init__() static types (e.g. Hash) help with refactoring - complete static types for all Distfile methods - log.warning() is only declared to take a str. hm. --- src/distfile.py | 118 +++++++++++++++++++++++++++++------------------- src/util.py | 36 +++++++++------ 2 files changed, 93 insertions(+), 61 deletions(-) diff --git a/src/distfile.py b/src/distfile.py index 2710760..2a3189b 100644 --- a/src/distfile.py +++ b/src/distfile.py @@ -3,52 +3,66 @@ # This file is distributed under a 2-clause BSD license. # See the LICENSE file for details. +from hashlib import Hash +from pathlib import PurePosixPath +from random import Random +from shutil import copyfileobj +from typing import Any, NamedTuple, cast +from urllib.error import URLError import logging -import os -import random -import shutil -import subprocess -import urllib from . import config from . import util +from .util import PathExt log = logging.getLogger(__name__) + +class Access(NamedTuple('Access', [ + ('urlopen', util.UrlopenFn), + ('subprocess', util.RunCommand), + ('random', Random)])): + pass + + class Distfile: - def __init__(self, distdir, name, checksum, master_sites, patches, - unsafe_string_sources): + def __init__(self, distdir: PathExt, io: Access, name: str, checksum: Hash, + master_sites: List[str], + patches: List[PathExt], + unsafe_string_sources: List[str]) -> None: for patch in patches: - if not os.path.isfile(patch): + if not patch.exists(): raise Exception('Patch %s does not exist' % patch) self._distdir = distdir + self._io = io self._name = name self._checksum = checksum self._patches = patches self._unsafe_string_sources = unsafe_string_sources - self._pathname = os.path.join(distdir, self._name) + self._pathname = distdir / self._name # type: PathExt # Compute distfile URLs based on the provided list of sites. # Also add fallback URLs in case the master sites are down. 
self._urls = { - site + os.path.basename(self._name) for site in master_sites + site + PurePosixPath(self._name).name for site in master_sites } | { site + self._name for site in config.FALLBACK_MIRRORS } @staticmethod - def _apply_patch(patch, target): + def _apply_patch(patch: PathExt, target: PathExt, + subprocess: util.RunCommand): # Automatically determine the patchlevel by taking a look at the # first filename in the patch. patchlevel = 0 - with open(patch, 'rb') as f: + with patch.open('rb') as f: for l in f.readlines(): if l.startswith(b'--- '): filename = str(l[4:-1].split(b'\t', 1)[0], encoding='ASCII') while True: - if os.path.exists(os.path.join(target, filename)): + if (target / filename).exists(): # Correct patchlevel determined. break # Increment patchlevel once more. @@ -63,35 +77,41 @@ def _apply_patch(patch, target): break # Apply the patch. - with open(patch) as f: + with patch.open() as f: subprocess.check_call( - ['patch', '-d', target, '-tsp%d' % patchlevel], stdin=f) + ['patch', '-d', str(target), '-tsp%d' % patchlevel], stdin=f) # Delete .orig files that patch leaves behind. for path in util.walk_files(target): - if path.endswith('.orig'): - os.unlink(path) + if path.suffix == '.orig': + path.unlink() + + def _extract_unpatched(self, target: PathExt): + io = self._io - def _extract_unpatched(self, target): # Fetch and extract tarball. self._fetch() - tar = os.path.join(config.DIR_BUILDROOT, 'bin/bsdtar') - if not os.path.exists(tar): + allfiles = target # KLUDGE + tar = allfiles / config.DIR_BUILDROOT / 'bin/bsdtar' + if not tar.exists(): tar = 'tar' util.make_dir(target) - subprocess.check_call([tar, '-xC', target, '-f', self._pathname]) + io.subprocess.check_call([tar, + '-xC', str(target), + '-f', str(self._pathname)]) # Remove leading directory names. while True: - entries = os.listdir(target) + entries = list(target.iterdir()) if len(entries) != 1: return target - subdir = os.path.join(target, entries[0]) - if not os.path.isdir(subdir): + subdir = cast(PathExt, entries[0]) + if not subdir.is_dir(): return target target = subdir - def _fetch(self): + def _fetch(self) -> None: + io = self._io for i in range(10): log.info('CHECKSUM %s', self._pathname) # Validate the existing file on disk. @@ -99,52 +119,56 @@ def _fetch(self): if util.sha256(self._pathname).hexdigest() == self._checksum: return except FileNotFoundError as e: - log.warning(e) + log.warning(str(e)) - url = random.sample(self._urls, 1)[0] + url = io.random.sample(self._urls, 1)[0] log.info('FETCH %s', url) try: util.make_parent_dir(self._pathname) - with util.unsafe_fetch(url) as fin, open(self._pathname, 'wb') as fout: - shutil.copyfileobj(fin, fout) + with util.unsafe_fetch(url, io.urlopen) as fin, \ + self._pathname.open('wb') as fout: + copyfileobj(fin, fout) except ConnectionResetError as e: - log.warning(e) - except urllib.error.URLError as e: - log.warning(e) + log.warning(str(e)) + except URLError as e: + log.warning(str(e)) raise Exception('Failed to fetch %s' % self._name) - def extract(self, target): + def extract(self, target: PathExt): + io = self._io target = self._extract_unpatched(target) # Apply patches. for patch in self._patches: - self._apply_patch(patch, target) + self._apply_patch(patch, target, io.subprocess) # Add markers to sources that depend on unsafe string sources. 
for filename in self._unsafe_string_sources: - path = os.path.join(target, filename) - with open(path, 'rb') as fin, open(path + '.new', 'wb') as fout: - fout.write(bytes('#define _CLOUDLIBC_UNSAFE_STRING_FUNCTIONS\n', - encoding='ASCII')) - fout.write(fin.read()) - os.rename(path + '.new', path) + path = target / filename + with path.open('rb') as fin, (path + '.new').open('wb') as fout: + fout.write( + bytes('#define _CLOUDLIBC_UNSAFE_STRING_FUNCTIONS\n', + encoding='ASCII')) + fout.write(fin.read()) + (path + '.new').rename(path) return target - def fixup_patches(self, tmpdir): + def fixup_patches(self, tmpdir: PathExt): + io = self._io if not self._patches: return # Extract one copy of the code to diff against. util.remove(tmpdir) - orig_dir = self._extract_unpatched(os.path.join(tmpdir, 'orig')) + orig_dir = self._extract_unpatched(tmpdir / 'orig') for path in util.walk_files(orig_dir): - if path.endswith('.orig'): - os.unlink(path) + if path.suffix == '.orig': + path.unlink() for patch in sorted(self._patches): log.info('FIXUP %s', patch) # Apply individual patches to the code. - patched_dir = os.path.join(tmpdir, 'patched') + patched_dir = tmpdir / 'patched' util.remove(patched_dir) patched_dir = self._extract_unpatched(patched_dir) - self._apply_patch(patch, patched_dir) + self._apply_patch(patch, patched_dir, io.subprocess) # Generate a new patch. - util.diff(orig_dir, patched_dir, patch) + util.diff(orig_dir, patched_dir, patch, io.subprocess) diff --git a/src/util.py b/src/util.py index 8f96f8f..cb535fa 100644 --- a/src/util.py +++ b/src/util.py @@ -7,20 +7,33 @@ # Use the Path type only; the constructor is ambient authority from pathlib import Path as PathT, PurePath, PurePosixPath from shutil import copyfileobj -from subprocess import PIPE -from typing import (AnyStr, Callable, Generic, Iterator, - Tuple, Type, TypeVar, Union) -from urllib.request import HTTPResponse, addinfourl +from subprocess import PIPE, Popen as PopenT +from typing import (AnyStr, BinaryIO, Callable, Generic, Iterator, + List, Tuple, Type, TypeVar, Union, cast) import gzip import hashlib import ssl +UrlopenFn = Callable[..., BinaryIO] + Self = TypeVar('Self') +_SubPath = Union [str, PurePath] + + +class RunCommand(object): + def Popen(self, args: List[str], **kwargs) -> PopenT: + raise NotImplementedError + + def check_call(self, args: List[str], **kwargs) -> int: + raise NotImplementedError class PathTFix(Generic[Self], PathT): # fix lack of parameter in PurePath type decl # ref https://github.com/python/typeshed/issues/553 + def __truediv__(self, key: _SubPath) -> Self: # type: ignore + raise NotImplementedError + def iterdir(self) -> Iterator[Self]: # type: ignore raise NotImplementedError @@ -110,9 +123,9 @@ def copy_file(source: PathExt, target: PathExt, preserve_attributes: bool): def diff(orig_dir: PathExt, patched_dir: PathExt, patch: PathExt, - Popen): - proc = Popen(['diff', '-urN', str(orig_dir), str(patched_dir)], - stdout=PIPE) + subprocess: RunCommand): + proc = subprocess.Popen(['diff', '-urN', str(orig_dir), str(patched_dir)], + stdout=PIPE) minline = bytes('--- %s/' % orig_dir, encoding='ASCII') plusline = bytes('+++ %s/' % patched_dir, encoding='ASCII') with patch.open('wb') as f: @@ -149,15 +162,10 @@ def file_contents_equal(path1: PathT, path2: PathT) -> bool: def gzip_file(source: PathT, target: PathT): with source.open('rb') as f1, target.open('wb') as ft, gzip.GzipFile( fileobj=ft, mode='wb', mtime=0) as f2: - copyfileobj(f1, f2) # type: ignore slight mismatch with GzipFile - - -# 
So says the standard python 3.4 stubs -_UrlopenRet = Union[HTTPResponse, addinfourl] -_UrlopenFn = Callable[..., _UrlopenRet] + copyfileobj(f1, cast(BinaryIO, f2)) -def unsafe_fetch(url: str, urlopen: _UrlopenFn) -> _UrlopenRet: +def unsafe_fetch(url: str, urlopen: UrlopenFn) -> BinaryIO: # Fetch a file over HTTP, HTTPS or FTP. For HTTPS, we don't do any # certificate checking. The caller should validate the authenticity # of the result. From 583b758d806bcd67bff7391b35bcc0987fcfe9f1 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 14:07:20 -0500 Subject: [PATCH 11/28] src.package: static types, explicit authority --- src/package.py | 82 +++++++++++++++++++++++++++++--------------------- 1 file changed, 48 insertions(+), 34 deletions(-) diff --git a/src/package.py b/src/package.py index 033c675..27249a5 100644 --- a/src/package.py +++ b/src/package.py @@ -3,23 +3,28 @@ # This file is distributed under a 2-clause BSD license. # See the LICENSE file for details. +from typing import Callable, Optional, Set, cast import logging -import os -import shutil -import stat -import subprocess from . import config from . import util from .builder import BuildDirectory, BuildHandle, HostBuilder, TargetBuilder +from .util import PathExt log = logging.getLogger(__name__) +WHAT = int + + class HostPackage: - def __init__(self, install_directory, name, version, homepage, - maintainer, build_depends, lib_depends, distfiles, - build_cmd, resource_directory): + def __init__(self, install_directory: PathExt, + name: str, version: str, homepage :str, maintainer: str, + build_depends: Set[HostPackage], + lib_depends: Set[HostPackage], + distfiles: WHAT, + build_cmd: Callable[[BuildHandle], None], + resource_directory: PathExt) -> None: self._install_directory = install_directory self._name = name self._version = version @@ -27,32 +32,34 @@ def __init__(self, install_directory, name, version, homepage, self._build_cmd = build_cmd self._resource_directory = resource_directory + self._build_depends = set() # type: Set[HostPackage] + self._lib_depends = set() # type: Set[HostPackage] + # Compute the set of transitive build dependencies. - self._build_depends = set() for dep in build_depends: self._build_depends.add(dep) self._build_depends |= dep._lib_depends # Compute the set of transitive library dependencies. - self._lib_depends = set() for dep in lib_depends: self._lib_depends.add(dep) self._lib_depends |= dep._lib_depends - def _initialize_buildroot(self): + def _initialize_buildroot(self) -> None: # Ensure that all dependencies have been built. deps = self._build_depends | self._lib_depends for dep in deps: dep.build() # Install dependencies into an empty buildroot. - util.remove_and_make_dir(config.DIR_BUILDROOT) + allfiles = self._install_directory # KLUDGE + util.remove_and_make_dir(allfiles / config.DIR_BUILDROOT) for dep in deps: dep.extract() - def build(self): + def build(self) -> None: # Skip this package if it has been built already. - if os.path.isdir(self._install_directory): + if self._install_directory.is_dir(): return # Perform the build inside an empty buildroot. @@ -66,17 +73,22 @@ def build(self): def extract(self): # Copy files literally. 
+ allfiles = self._install_directory # KLUDGE for source_file, target_file in util.walk_files_concurrently( - self._install_directory, config.DIR_BUILDROOT): + self._install_directory, allfiles / config.DIR_BUILDROOT): util.make_parent_dir(target_file) util.copy_file(source_file, target_file, False) class TargetPackage: - def __init__(self, install_directory, arch, name, version, homepage, - maintainer, host_packages, lib_depends, build_cmd, - distfiles, resource_directory): + def __init__(self, install_directory: PathExt, + arch: str, name: str, version: str, homepage: str, + maintainer: str, + host_packages: Dict[str, HostPackage], + lib_depends: Set[TargetPackage], + build_cmd: Optional[Callable[[BuildHandle], None]], + distfiles: WHAT, resource_directory: PathExt) -> None: self._install_directory = install_directory self._arch = arch self._name = name @@ -89,7 +101,7 @@ def __init__(self, install_directory, arch, name, version, homepage, self._resource_directory = resource_directory # Compute the set of transitive library dependencies. - self._lib_depends = set() + self._lib_depends = set() # type: Set[TargetPackage] for dep in lib_depends: if dep._build_cmd: self._lib_depends.add(dep) @@ -100,7 +112,7 @@ def __str__(self): def build(self): # Skip this package if it has been built already. - if not self._build_cmd or os.path.isdir(self._install_directory): + if not self._build_cmd or self._install_directory.is_dir(): return # Perform the build inside a buildroot with its dependencies @@ -126,15 +138,15 @@ def extract(self, path, expandpath): for source_file, target_file in util.walk_files_concurrently( self._install_directory, path): util.make_parent_dir(target_file) - if target_file.endswith('.template'): + if target_file.suffix == '.template': # File is a template. Expand %%PREFIX%% tags. - target_file = target_file[:-9] - with open(source_file, 'r') as f: + target_file = target_file.with_name(target_file.name[:-9]) + with source_file.open(mode='r') as f: contents = f.read() contents = contents.replace('%%PREFIX%%', expandpath) - with open(target_file, 'w') as f: + with target_file.open(mode='w') as f: f.write(contents) - shutil.copymode(source_file, target_file) + source_file.copymode(target_file) else: # Regular file. Copy it over literally. util.copy_file(source_file, target_file, False) @@ -169,7 +181,7 @@ def get_redhat_name(self): def get_homepage(self): return self._homepage - def get_lib_depends(self): + def get_lib_depends(self) -> Set[TargetPackage]: return self._lib_depends def get_maintainer(self): @@ -181,23 +193,25 @@ def get_name(self): def get_version(self): return self._version - def initialize_buildroot(self, host_depends, lib_depends=set()): + def initialize_buildroot(self, host_depends: List[str], + lib_depends: Set[TargetPackage]=set()) -> None: # Ensure that all dependencies have been built. host_deps = set() - for dep in host_depends: - package = self._host_packages[dep] + for dep_name in host_depends: + package = self._host_packages[dep_name] host_deps.add(package) for depdep in package._lib_depends: host_deps.add(depdep) for dep in host_deps: dep.build() - for dep in lib_depends: - dep.build() + for ldep in lib_depends: + ldep.build() # Install dependencies into an empty buildroot. 
- util.remove_and_make_dir(config.DIR_BUILDROOT) + allfiles = self._install_directory # KLUDGE + util.remove_and_make_dir(allfiles / config.DIR_BUILDROOT) for dep in host_deps: dep.extract() - prefix = os.path.join(config.DIR_BUILDROOT, self._arch) - for dep in lib_depends: - dep.extract(prefix, prefix) + prefix = allfiles / config.DIR_BUILDROOT, self._arch + for ldep in lib_depends: + ldep.extract(prefix, prefix) From 4427e05ec1808f2c506337bd9b7037f700da8bd5 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 14:16:37 -0500 Subject: [PATCH 12/28] prune dead "Skip directory names." code --- src/builder.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/src/builder.py b/src/builder.py index 1c58892..aa2a1f4 100644 --- a/src/builder.py +++ b/src/builder.py @@ -131,16 +131,6 @@ def cmake(self, args=[]): self._builder.cmake(builddir, self._path, args) return FileHandle(self._builder, builddir) - # Skip directory names. - while True: - entries = os.listdir(source_directory) - if len(entries) != 1: - break - new_directory = os.path.join(source_directory, entries[0]) - if not os.path.isdir(new_directory): - break - source_directory = new_directory - def install(self, path='.'): self._builder.install(self._path, path) From 06ff4cbc06354aceac978a26e0349572a28f47ea Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 18:35:21 -0500 Subject: [PATCH 13/28] src.builder: explicit authority, static types - DiffCreator gets subprocess - BuildDirectory gets platform - FileHandle etc. get an Access (subprocess etc.) - add check_output to RunCommand interface - Builder interface common to HostBuilder and TargetBuilder - refactor allpath KLUDGE using PathExt.platform() and config.config_path_fn - use single-assignment idiom for tar vs bsdtar - prune resource_directory - restore DIR_RESOURCES but only as a PurePath for use with check_call() - Distfile needs a RandomT - never mind PathTFix generic Self It didn't work -- turned into Any -- and hid a few type errors. --- src/builder.py | 308 ++++++++++++++++++++++++++++-------------------- src/config.py | 55 +++++++-- src/distfile.py | 21 ++-- src/package.py | 57 ++++----- src/util.py | 42 ++++--- 5 files changed, 287 insertions(+), 196 deletions(-) diff --git a/src/builder.py b/src/builder.py index aa2a1f4..8c194ad 100644 --- a/src/builder.py +++ b/src/builder.py @@ -3,31 +3,29 @@ # This file is distributed under a 2-clause BSD license. # See the LICENSE file for details. +from random import Random as RandomT +from typing import Any, Callable, Dict, List, IO, NamedTuple, Optional, cast import logging -import os -import random -import shutil -import string -import subprocess from . import config from . import util +from .distfile import Distfile +from .util import PathExt log = logging.getLogger(__name__) -def _chdir(path): - util.make_dir(path) - os.chdir(path) - class DiffCreator: - def __init__(self, source_directory, build_directory, filename): + def __init__(self, source_directory: PathExt, + build_directory: BuildDirectory, + filename: PathExt, subprocess: util.RunCommand) -> None: self._source_directory = source_directory self._build_directory = build_directory self._filename = filename + self._subprocess = subprocess - def __enter__(self): + def __enter__(self) -> None: # Create a backup of the source directory. 
self._backup_directory = self._build_directory.get_new_directory() for source_file, backup_file in util.walk_files_concurrently( @@ -35,44 +33,51 @@ def __enter__(self): util.make_parent_dir(backup_file) util.copy_file(source_file, backup_file, False) - def __exit__(self, type, value, traceback): + def __exit__(self, type, value, traceback) -> None: # Create a diff to store the changes that were made to the original. util.diff(self._backup_directory, self._source_directory, - self._filename) + self._filename, self._subprocess) + + +Access = NamedTuple('FH_Access', [ + ('subprocess', util.RunCommand), + ('chdir', Callable[[PathExt], None]), + ('getenv', Callable[[str], str])]) class FileHandle: - def __init__(self, builder, path): + def __init__(self, builder: Builder, path: PathExt, io: Access) -> None: self._builder = builder self._path = path + self._io = io def __str__(self): return self._path def gnu_configure(self, args=[], inplace=False): for path in util.walk_files(self._path): - filename = os.path.basename(path) + filename = path.name if filename in {'config.guess', 'config.sub'}: # Replace the config.guess and config.sub files by # up-to-date copies. The copies provided by the tarball # rarely support CloudABI. - os.unlink(path) - shutil.copy(os.path.join(config.DIR_RESOURCES, filename), path) + path.unlink() + path.open('wb').write(config.RESOURCES[filename]) elif filename == 'ltmain.sh': # Patch up libtool to archive object files in sorted # order. This has been fixed in the meantime. - with open(path, 'r') as fin, open(path + '.new', 'w') as fout: + with path.open('r') as fin, (path + '.new').open('w') as fout: for l in fin.readlines(): # Add sort to the pipeline. fout.write(l.replace( '-print | $NL2SP', '-print | sort | $NL2SP')) - shutil.copymode(path, path + '.new') - os.rename(path + '.new', path) + path.copymode(path + '.new') + (path + '.new').rename(path) elif filename == 'configure': # Patch up configure scripts to remove constructs that are known # to fail, for example due to functions being missing. - with open(path, 'rb') as fin, open(path + '.new', 'wb') as fout: + with path.open('rb') as fin, (path + '.new').open('wb') as fout: for l in fin.readlines(): # Bad C99 features test. if l.startswith(b'#define showlist(...)'): @@ -80,21 +85,22 @@ def gnu_configure(self, args=[], inplace=False): elif l.startswith(b'#define report(test,...)'): l = b'#define report(...) fprintf (stderr, __VA_ARGS__)\n' fout.write(l) - shutil.copymode(path, path + '.new') - os.rename(path + '.new', path) + path.copymode(path + '.new') + (path + '.new').rename(path) # Run the configure script in a separate directory. 
builddir = (self._path if inplace else self._builder._build_directory.get_new_directory()) self._builder.gnu_configure( - builddir, os.path.join(self._path, 'configure'), args) - return FileHandle(self._builder, builddir) + builddir, self._path / 'configure', args) + return FileHandle(self._builder, builddir, self._io) def compile(self, args=[]): + subprocess = self._io.subprocess output = self._path + '.o' - os.chdir(os.path.dirname(self._path)) - ext = os.path.splitext(self._path)[1] + self._io.chdir(self._path.parent) + ext = self._path.suffix if ext in {'.c', '.S'}: log.info('CC %s', self._path) subprocess.check_call( @@ -107,9 +113,10 @@ def compile(self, args=[]): args + ['-c', '-o', output, self._path]) else: raise Exception('Unknown file extension: %s' % ext) - return FileHandle(self._builder, output) + return FileHandle(self._builder, output, self._io) - def debug_shell(self): + def debug_shell(self) -> None: + os = self._io self.run([ 'HOME=' + os.getenv('HOME'), 'LC_CTYPE=' + os.getenv('LC_CTYPE'), @@ -117,19 +124,21 @@ def debug_shell(self): 'sh', ]) - def diff(self, filename): - return DiffCreator(self._path, self._builder._build_directory, filename) + def diff(self, filename: PathExt) -> DiffCreator: + return DiffCreator(self._path, self._builder._build_directory, filename, + self._io.subprocess) - def host(self): - return FileHandle(self._builder._host_builder, self._path) + def host(self) -> FileHandle: + builder = cast(TargetBuilder, self._builder) + return FileHandle(builder._host_builder, self._path, self._io) - def rename(self, dst): - os.rename(self._path, dst._path) + def rename(self, dst: FileHandle) -> None: + self._path.rename(dst._path) - def cmake(self, args=[]): + def cmake(self, args: List[str]=[]) -> FileHandle: builddir = self._builder._build_directory.get_new_directory() self._builder.cmake(builddir, self._path, args) - return FileHandle(self._builder, builddir) + return FileHandle(self._builder, builddir, self._io) def install(self, path='.'): self._builder.install(self._path, path) @@ -142,7 +151,8 @@ def make_install(self, args=['install']): self.run(['make', 'DESTDIR=' + stagedir] + args) return FileHandle( self._builder, - os.path.join(stagedir, self._builder.get_prefix()[1:])) + stagedir.pathjoin(self._builder.get_prefix()[1:]), + self._io) def ninja(self): self.run(['ninja']) @@ -152,23 +162,24 @@ def ninja_install(self): self.run(['DESTDIR=' + stagedir, 'ninja', 'install']) return FileHandle( self._builder, - os.path.join(stagedir, self._builder.get_prefix()[1:])) + stagedir.pathjoin(self._builder.get_prefix()[1:]), + self._io) - def open(self, mode): - return open(self._path, mode) + def open(self, mode: str) -> IO[Any]: + return self._path.open(mode) - def path(self, path): - return FileHandle(self._builder, os.path.join(self._path, path)) + def path(self, path: str) -> FileHandle: + return FileHandle(self._builder, self._path / path, self._io) - def remove(self): + def remove(self) -> None: util.remove(self._path) - def run(self, command): + def run(self, command: List[str]) -> None: self._builder.run(self._path, command) - def symlink(self, contents): + def symlink(self, contents: str) -> None: util.remove(self._path) - os.symlink(contents, self._path) + self._path.symlink_to(contents) def unhardcode_paths(self): self._builder.unhardcode_paths(self._path) @@ -176,72 +187,77 @@ def unhardcode_paths(self): class BuildHandle: - def __init__(self, builder, name, version, distfiles, resource_directory): + def __init__(self, builder: Builder, name: 
str, version: str, + distfiles: Dict[str, Distfile], + io: Access) -> None: self._builder = builder self._name = name self._version = version self._distfiles = distfiles - self._resource_directory = resource_directory + self._io = io def archive(self, objects): return FileHandle(self._builder, - self._builder.archive(obj._path for obj in objects)) + self._builder.archive(obj._path for obj in objects), + self._io) - def cc(self): + def cc(self) -> str: return self._builder.get_cc() - def cflags(self): + def cflags(self) -> str: return ' '.join(self._builder.get_cflags()) - def cpu(self): + def cpu(self) -> str: return self._builder.get_cpu() - def cxx(self): + def cxx(self) -> str: return self._builder.get_cxx() - def cxxflags(self): + def cxxflags(self) -> str: return ' '.join(self._builder.get_cxxflags()) @staticmethod - def endian(): + def endian() -> str: # TODO(ed): Extend this once we support big endian CPUs as well. return 'little' - def executable(self, objects): + def executable(self, objects: List[FileHandle]) -> FileHandle: objs = sorted(obj._path for obj in objects) output = self._builder._build_directory.get_new_executable() log.info('LD %s', output) + subprocess = self._io.subprocess subprocess.check_call([self._builder.get_cc(), '-o', output] + objs) - return FileHandle(self._builder, output) + return FileHandle(self._builder, output, self._io) def extract(self, name='%(name)s-%(version)s'): return FileHandle( self._builder, self._distfiles[ name % {'name': self._name, 'version': self._version} - ].extract(self._builder._build_directory.get_new_directory()) + ].extract(self._builder._build_directory.get_new_directory()), + self._io ) - def gnu_triple(self): + def gnu_triple(self) -> str: return self._builder.get_gnu_triple() def host(self): return BuildHandle( self._builder._host_builder, self._name, self._version, - self._distfiles, self._resource_directory) + self._distfiles, self._io) - def localbase(self): + def localbase(self) -> str: return self._builder.get_localbase() - def prefix(self): + def prefix(self) -> PathExt: return self._builder.get_prefix() - def resource(self, name): - source = os.path.join(self._resource_directory, name) - target = os.path.join(config.DIR_BUILDROOT, 'build', name) + def resource(self, name: str) -> FileHandle: + root = self._builder._platform(config.DIR_BUILDROOT) + target = root / 'build' / name util.make_parent_dir(target) - util.copy_file(source, target, False) - return FileHandle(self._builder, target) + target.open('wb').write(config.RESOURCES[name]) + return FileHandle(self._builder, target, self._io) @staticmethod def stack_direction(): @@ -251,37 +267,68 @@ def stack_direction(): class BuildDirectory: - def __init__(self): + def __init__(self, platform: Callable[[object], PathExt]) -> None: self._sequence_number = 0 - self._builddir = os.path.join(config.DIR_BUILDROOT, 'build') + self._builddir = platform(config.DIR_BUILDROOT) / 'build' def get_new_archive(self): - path = os.path.join(self._builddir, 'lib%d.a' % self._sequence_number) + path = self._builddir.pathjoin('lib%d.a' % self._sequence_number) util.make_parent_dir(path) self._sequence_number += 1 return path def get_new_directory(self): - path = os.path.join(self._builddir, str(self._sequence_number)) + path = self._builddir / str(self._sequence_number) util.make_dir(path) self._sequence_number += 1 return path def get_new_executable(self): - path = os.path.join(self._builddir, 'bin%d' % self._sequence_number) + path = self._builddir.pathjoin('bin%d' % 
self._sequence_number) util.make_parent_dir(path) self._sequence_number += 1 return path -class HostBuilder: - - def __init__(self, build_directory, install_directory): +class Builder: + def __init__(self, build_directory: BuildDirectory, + install_directory: PathExt, io: Access) -> None: self._build_directory = build_directory self._install_directory = install_directory + self._platform = install_directory.platform() + self._io = io + + def cmake(self, builddir: PathExt, srcdir: PathExt, + args: List[str]=[]) -> FileHandle: pass + + def run(self, cwd: PathExt, command: List[str]) -> None: pass + + def get_cc(self) -> str: pass + + def get_cflags(self) -> List[str]: pass + + def get_cpu(self) -> str: pass + + def get_cxx(self) -> str: pass + + def get_cxxflags(self) -> List[str]: pass + + def get_gnu_triple(self) -> str: pass + + def get_localbase(self) -> str: pass + + def get_prefix(self) -> PathExt: pass + + +class HostBuilder(Builder): + + def __init__(self, build_directory: BuildDirectory, + install_directory: Optional[PathExt], + io: Access) -> None: + Builder.__init__(self, build_directory, install_directory, io) self._cflags = [ - '-O2', '-I' + os.path.join(self.get_prefix(), 'include'), + '-O2', '-I' + str(self.get_prefix().pathjoin('include')), ] def gnu_configure(self, builddir, script, args): @@ -292,22 +339,20 @@ def cmake(self, builddir, sourcedir, args): 'cmake', sourcedir, '-G', 'Ninja', '-DCMAKE_BUILD_TYPE=Release', '-DCMAKE_INSTALL_PREFIX=' + self.get_prefix()] + args) - @staticmethod - def get_cc(): - return config.HOST_CC + def get_cc(self) -> str: + return str(self._platform(config.HOST_CC)) - def get_cflags(self): + def get_cflags(self) -> List[str]: return self._cflags - @staticmethod - def get_cxx(): - return config.HOST_CXX + def get_cxx(self) -> str: + return str(self._platform(config.HOST_CXX)) - @staticmethod - def get_gnu_triple(): + def get_gnu_triple(self): # Run config.guess to determine the GNU triple of the system # we're running on. - config_guess = os.path.join(config.DIR_RESOURCES, 'config.guess') + config_guess = config.DIR_RESOURCES / 'config.guess' + subprocess = self._io.subprocess triple = subprocess.check_output(config_guess) return str(triple, encoding='ASCII').strip() @@ -315,14 +360,14 @@ def get_gnu_triple(): def get_prefix(): return config.DIR_BUILDROOT - def install(self, source, target): + def install(self, source: PathExt, target: PathExt) -> None: log.info('INSTALL %s->%s', source, target) - target = os.path.join(self._install_directory, target) + target = self._install_directory / target for source_file, target_file in util.walk_files_concurrently( source, target): # As these are bootstrapping tools, there is no need to # preserve any documentation and locales. 
- path = os.path.relpath(target_file, target) + path = str(target_file.relative_to(target)) if (path != 'lib/charset.alias' and not path.startswith('share/doc/') and not path.startswith('share/info/') and @@ -331,46 +376,50 @@ def install(self, source, target): util.make_parent_dir(target_file) util.copy_file(source_file, target_file, False) - def run(self, cwd, command): - _chdir(cwd) + def run(self, cwd: PathExt, command: List[str]) -> None: + os = self._io + os.chdir(cwd) + subprocess = self._io.subprocess subprocess.check_call([ 'env', - 'CC=' + self.get_cc(), - 'CXX=' + self.get_cxx(), + 'CC=' + str(self.get_cc()), + 'CXX=' + str(self.get_cxx()), 'CFLAGS=' + ' '.join(self._cflags), 'CXXFLAGS=' + ' '.join(self._cflags), - 'LDFLAGS=-L' + os.path.join(self.get_prefix(), 'lib'), - 'PATH=%s:%s' % (os.path.join(self.get_prefix(), 'bin'), + 'LDFLAGS=-L' + self.get_prefix().pathjoin('lib'), + 'PATH=%s:%s' % (self.get_prefix().pathjoin('bin'), os.getenv('PATH')), ] + command) -class TargetBuilder: +class TargetBuilder(Builder): - def __init__(self, build_directory, install_directory, arch): - self._build_directory = build_directory - self._install_directory = install_directory + def __init__(self, build_directory: BuildDirectory, + install_directory: PathExt, arch: str, + io: Access) -> None: + Builder.__init__(self, build_directory, install_directory, io) self._arch = arch # Pick a random prefix directory. That way the build will fail # due to nondeterminism in case our piece of software hardcodes # the prefix directory. - self._prefix = '/' + ''.join( - random.choice(string.ascii_letters) for i in range(16)) + platform = install_directory.platform() + self._prefix = platform(config.RANDOM) - self._bindir = os.path.join(config.DIR_BUILDROOT, 'bin') - self._localbase = os.path.join(config.DIR_BUILDROOT, self._arch) + self._bindir = platform(config.DIR_BUILDROOT) / 'bin' + self._localbase = platform(config.DIR_BUILDROOT) / self._arch self._cflags = [ '-O2', '-Werror=implicit-function-declaration', '-Werror=date-time', ] # In case we need to build software for the host system. 
- self._host_builder = HostBuilder(build_directory, None) + self._host_builder = HostBuilder(build_directory, None, self._io) - def _tool(self, name): - return os.path.join(self._bindir, '%s-%s' % (self._arch, name)) + def _tool(self, name) -> str: + return str(self._bindir.pathjoin('%s-%s' % (self._arch, name))) def archive(self, object_files): + subprocess = self._io.subprocess objs = sorted(object_files) output = self._build_directory.get_new_archive() log.info('AR %s', output) @@ -397,54 +446,54 @@ def cmake(self, builddir, sourcedir, args): '-DCMAKE_SYSTEM_PROCESSOR=' + self._arch.split('-')[0], '-DUNIX=YES'] + args) - def get_cc(self): + def get_cc(self) -> str: return self._tool('cc') - def get_cflags(self): + def get_cflags(self) -> List[str]: return self._cflags - def get_cpu(self): + def get_cpu(self) -> str: return self._arch.split('-', 1)[0] - def get_cxx(self): + def get_cxx(self) -> str: return self._tool('c++') - def get_cxxflags(self): + def get_cxxflags(self) -> List[str]: return self._cflags - def get_gnu_triple(self): + def get_gnu_triple(self) -> str: return self._arch - def get_localbase(self): - return self._localbase + def get_localbase(self) -> str: + return str(self._localbase) - def get_prefix(self): + def get_prefix(self) -> PathExt: return self._prefix - def _unhardcode(self, source, target): - assert not os.path.islink(source) - with open(source, 'r') as f: + def _unhardcode(self, source: PathExt, target: PathExt) -> None: + assert not source.is_symlink() + with source.open('r') as f: contents = f.read() contents = (contents .replace(self.get_prefix(), '%%PREFIX%%') .replace(self._localbase, '%%PREFIX%%')) - with open(target, 'w') as f: + with target.open('w') as f: f.write(contents) def unhardcode_paths(self, path): self._unhardcode(path, path + '.template') - shutil.copymode(path, path + '.template') - os.unlink(path) + path.copymode(path + '.template') + path.unlink() - def install(self, source, target): + def install(self, source: PathExt, target: PathExt) -> None: log.info('INSTALL %s->%s', source, target) - target = os.path.join(self._install_directory, target) + target = self._install_directory / target for source_file, target_file in util.walk_files_concurrently( source, target): util.make_parent_dir(target_file) - relpath = os.path.relpath(target_file, self._install_directory) - ext = os.path.splitext(source_file)[1] - if ext in {'.la', '.pc'} and not os.path.islink(source_file): + relpath = str(target_file.relative_to(self._install_directory)) + ext = source_file.suffix + if ext in {'.la', '.pc'} and not source_file.is_symlink(): # Remove references to the installation prefix and the # localbase directory from libtool archives and # pkg-config files. @@ -465,7 +514,8 @@ def install(self, source, target): util.copy_file(source_file, target_file, False) def run(self, cwd, command): - _chdir(cwd) + self._io.chdir(cwd) + subprocess = self._io.subprocess subprocess.check_call([ 'env', '-i', 'AR=' + self._tool('ar'), @@ -480,7 +530,7 @@ def run(self, cwd, command): # List tools directory twice, as certain tools and scripts # get confused if PATH contains no colon. 
'PATH=%s:%s' % (self._bindir, self._bindir), - 'PERL=' + config.PERL, + 'PERL=' + str(self._platform(config.PERL)), 'PKG_CONFIG=' + self._tool('pkg-config'), 'RANLIB=' + self._tool('ranlib'), 'STRIP=' + self._tool('strip'), diff --git a/src/config.py b/src/config.py index 40b7eac..2fe2d3c 100644 --- a/src/config.py +++ b/src/config.py @@ -3,8 +3,11 @@ # This file is distributed under a 2-clause BSD license. # See the LICENSE file for details. +from pathlib import PurePath from pkgutil import get_data -import platform +from random import Random as RandomT +from typing import Callable, NamedTuple +import string # Architectures for which we can build packages. ARCHITECTURES = { @@ -14,12 +17,8 @@ 'x86_64-unknown-cloudabi', } -# Temporary directory where packages will be built. This directory has -# to be fixed, as the compilation process tends to hardcode paths to the -# build directory. Debug symbols and __FILE__ use absolute paths. -DIR_BUILDROOT = '/usr/obj/cloudabi-ports' - # Resource files. +DIR_RESOURCES = PurePath(__file__).parent / 'misc' RESOURCES = dict((name, get_data('misc', name)) for name in ['config.guess', 'config.sub']) @@ -27,14 +26,46 @@ # are down. FALLBACK_MIRRORS = {'https://nuxi.nl/distfiles/third_party/'} + +# Temporary directory where packages will be built. This directory has +# to be fixed, as the compilation process tends to hardcode paths to the +# build directory. Debug symbols and __FILE__ use absolute paths. +DIR_BUILDROOT = object() + # Host C and C++ compiler, used to compile the build tools. We'd better # use Clang if available. Compared to GCC, it has the advantage that it # does not depend on the 'as' and 'ld' utilities being part of $PATH. -HOST_CC = ('/usr/bin/clang-3.7' if platform.system() == 'Linux' else - '/usr/bin/cc') -HOST_CXX = ('/usr/bin/clang++-3.7' if platform.system() == 'Linux' else - '/usr/bin/c++') +[HOST_CC, HOST_CXX] = [object(), object()] # Name of the Perl executable. 
-PERL = ('/usr/local/bin/perl' if platform.system() == 'FreeBSD' else - '/usr/bin/perl') +PERL = object() + +RANDOM = object() + +Default = { + DIR_BUILDROOT: '/usr/obj/cloudabi-ports', + HOST_CC: '/usr/bin/cc', + HOST_CXX: '/usr/bin/cc++', + PERL: '/usr/bin/perl'} + +Linux = { + HOST_CC: '/usr/bin/clang-3.7', + HOST_CXX: '/usr/bin/clang++-3.7'} + +FreeBSD = { + PERL: '/usr/local/bin/perl'} + + + +def config_path_fn(system: str, + random: RandomT) -> Callable[[object], str]: + roots = Default.copy() + if system == 'Linux': + roots.update(Linux) + elif system == 'FreeBSD': + roots.update(FreeBSD) + def config_path(key): + if key == RANDOM: + return ''.join( + random.choice(string.ascii_letters) for i in range(16)) + return roots[key] diff --git a/src/distfile.py b/src/distfile.py index 2a3189b..5b47b4e 100644 --- a/src/distfile.py +++ b/src/distfile.py @@ -5,7 +5,7 @@ from hashlib import Hash from pathlib import PurePosixPath -from random import Random +from random import Random as RandomT from shutil import copyfileobj from typing import Any, NamedTuple, cast from urllib.error import URLError @@ -18,14 +18,11 @@ log = logging.getLogger(__name__) -class Access(NamedTuple('Access', [ +class Distfile: + Access = NamedTuple('D_Access', [ ('urlopen', util.UrlopenFn), ('subprocess', util.RunCommand), - ('random', Random)])): - pass - - -class Distfile: + ('random', RandomT)]) def __init__(self, distdir: PathExt, io: Access, name: str, checksum: Hash, master_sites: List[str], @@ -86,17 +83,15 @@ def _apply_patch(patch: PathExt, target: PathExt, if path.suffix == '.orig': path.unlink() - def _extract_unpatched(self, target: PathExt): + def _extract_unpatched(self, target: PathExt) -> PathExt: io = self._io # Fetch and extract tarball. self._fetch() - allfiles = target # KLUDGE - tar = allfiles / config.DIR_BUILDROOT / 'bin/bsdtar' - if not tar.exists(): - tar = 'tar' + platform = target.platform() + tar = platform(config.DIR_BUILDROOT) / 'bin/bsdtar' util.make_dir(target) - io.subprocess.check_call([tar, + io.subprocess.check_call([str(tar) if tar.exists() else 'tar', '-xC', str(target), '-f', str(self._pathname)]) diff --git a/src/package.py b/src/package.py index 27249a5..bcf32eb 100644 --- a/src/package.py +++ b/src/package.py @@ -3,34 +3,35 @@ # This file is distributed under a 2-clause BSD license. # See the LICENSE file for details. -from typing import Callable, Optional, Set, cast +from typing import Callable, Dict, Optional, Set, cast import logging from . import config from . 
import util from .builder import BuildDirectory, BuildHandle, HostBuilder, TargetBuilder +from .builder import Access +from .distfile import Distfile from .util import PathExt log = logging.getLogger(__name__) -WHAT = int class HostPackage: def __init__(self, install_directory: PathExt, + io: Access, name: str, version: str, homepage :str, maintainer: str, build_depends: Set[HostPackage], lib_depends: Set[HostPackage], - distfiles: WHAT, - build_cmd: Callable[[BuildHandle], None], - resource_directory: PathExt) -> None: + distfiles: Dict[str, Distfile], + build_cmd: Callable[[BuildHandle], None]) -> None: self._install_directory = install_directory + self._io = io self._name = name self._version = version self._distfiles = distfiles self._build_cmd = build_cmd - self._resource_directory = resource_directory self._build_depends = set() # type: Set[HostPackage] self._lib_depends = set() # type: Set[HostPackage] @@ -52,8 +53,8 @@ def _initialize_buildroot(self) -> None: dep.build() # Install dependencies into an empty buildroot. - allfiles = self._install_directory # KLUDGE - util.remove_and_make_dir(allfiles / config.DIR_BUILDROOT) + platform = self._install_directory.platform() + util.remove_and_make_dir(platform(config.DIR_BUILDROOT)) for dep in deps: dep.extract() @@ -65,17 +66,18 @@ def build(self) -> None: # Perform the build inside an empty buildroot. self._initialize_buildroot() log.info('BUILD %s', self._name) + platform = self._install_directory.platform() self._build_cmd( BuildHandle( - HostBuilder(BuildDirectory(), self._install_directory), - self._name, self._version, self._distfiles, - self._resource_directory)) + HostBuilder(BuildDirectory(platform), self._install_directory, + self._io), + self._name, self._version, self._distfiles, self._io)) def extract(self): # Copy files literally. - allfiles = self._install_directory # KLUDGE + platform = self._install_directory.platform() for source_file, target_file in util.walk_files_concurrently( - self._install_directory, allfiles / config.DIR_BUILDROOT): + self._install_directory, platform(config.DIR_BUILDROOT)): util.make_parent_dir(target_file) util.copy_file(source_file, target_file, False) @@ -83,13 +85,15 @@ def extract(self): class TargetPackage: def __init__(self, install_directory: PathExt, + io: Access, arch: str, name: str, version: str, homepage: str, maintainer: str, host_packages: Dict[str, HostPackage], lib_depends: Set[TargetPackage], build_cmd: Optional[Callable[[BuildHandle], None]], - distfiles: WHAT, resource_directory: PathExt) -> None: + distfiles: Dict[str, Distfile]) -> None: self._install_directory = install_directory + self._io = io self._arch = arch self._name = name self._version = version @@ -98,7 +102,6 @@ def __init__(self, install_directory: PathExt, self._host_packages = host_packages self._build_cmd = build_cmd self._distfiles = distfiles - self._resource_directory = resource_directory # Compute the set of transitive library dependencies. self._lib_depends = set() # type: Set[TargetPackage] @@ -110,7 +113,7 @@ def __init__(self, install_directory: PathExt, def __str__(self): return '%s %s' % (self.get_freebsd_name(), self._version) - def build(self): + def build(self) -> None: # Skip this package if it has been built already. 
if not self._build_cmd or self._install_directory.is_dir(): return @@ -124,17 +127,17 @@ def build(self): 'llvm', 'm4', 'make', 'ninja', 'pkgconf', 'sed', 'texinfo', }, self._lib_depends) log.info('BUILD %s %s', self._name, self._arch) + platform = self._install_directory.platform() self._build_cmd( BuildHandle( - TargetBuilder(BuildDirectory(), - self._install_directory, self._arch), - self._name, self._version, self._distfiles, - self._resource_directory)) + TargetBuilder(BuildDirectory(platform), + self._install_directory, self._arch, self._io), + self._name, self._version, self._distfiles, self._io)) - def clean(self): + def clean(self) -> None: util.remove(self._install_directory) - def extract(self, path, expandpath): + def extract(self, path: PathExt, expandpath: str) -> None: for source_file, target_file in util.walk_files_concurrently( self._install_directory, path): util.make_parent_dir(target_file) @@ -193,7 +196,7 @@ def get_name(self): def get_version(self): return self._version - def initialize_buildroot(self, host_depends: List[str], + def initialize_buildroot(self, host_depends: Set[str], lib_depends: Set[TargetPackage]=set()) -> None: # Ensure that all dependencies have been built. host_deps = set() @@ -208,10 +211,10 @@ def initialize_buildroot(self, host_depends: List[str], ldep.build() # Install dependencies into an empty buildroot. - allfiles = self._install_directory # KLUDGE - util.remove_and_make_dir(allfiles / config.DIR_BUILDROOT) + platform = self._install_directory.platform() + util.remove_and_make_dir(platform(config.DIR_BUILDROOT)) for dep in host_deps: dep.extract() - prefix = allfiles / config.DIR_BUILDROOT, self._arch + prefix = platform(config.DIR_BUILDROOT) / self._arch for ldep in lib_depends: - ldep.extract(prefix, prefix) + ldep.extract(prefix, str(prefix)) diff --git a/src/util.py b/src/util.py index cb535fa..25e4661 100644 --- a/src/util.py +++ b/src/util.py @@ -8,15 +8,14 @@ from pathlib import Path as PathT, PurePath, PurePosixPath from shutil import copyfileobj from subprocess import PIPE, Popen as PopenT -from typing import (AnyStr, BinaryIO, Callable, Generic, Iterator, - List, Tuple, Type, TypeVar, Union, cast) +from typing import (Any, AnyStr, BinaryIO, Callable, Dict, Generic, Iterator, + List, Tuple, Type, Union, cast) import gzip import hashlib import ssl UrlopenFn = Callable[..., BinaryIO] -Self = TypeVar('Self') _SubPath = Union [str, PurePath] @@ -27,31 +26,34 @@ def Popen(self, args: List[str], **kwargs) -> PopenT: def check_call(self, args: List[str], **kwargs) -> int: raise NotImplementedError + def check_output(self, args: str, **kwargs) -> Any: + raise NotImplementedError + -class PathTFix(Generic[Self], PathT): +class PathExt(PathT): # fix lack of parameter in PurePath type decl # ref https://github.com/python/typeshed/issues/553 - def __truediv__(self, key: _SubPath) -> Self: # type: ignore + def pathjoin(self, *key: _SubPath) -> PathExt: # type: ignore raise NotImplementedError - def iterdir(self) -> Iterator[Self]: # type: ignore + def __truediv__(self, key: _SubPath) -> PathExt: # type: ignore raise NotImplementedError - def relative_to(self, *other: str) -> Self: # type: ignore + def iterdir(self) -> Iterator[PathExt]: # type: ignore raise NotImplementedError - def resolve(self) -> Self: # type: ignore + def relative_to(self, *other: _SubPath) -> PathExt: # type: ignore raise NotImplementedError - def with_name(self, name: str) -> Self: # type: ignore + def resolve(self) -> PathExt: # type: ignore raise NotImplementedError - 
parent = None # type: Self - + def with_name(self, name: str) -> PathExt: # type: ignore + raise NotImplementedError -class PathExt(PathTFix): + parent = None # type: PathExt - def __add__(self, suffix: str) -> Self: + def __add__(self, suffix: str) -> PathExt: raise NotImplementedError def copy(self, target: PathExt): @@ -72,11 +74,15 @@ def readlink(self) -> AnyStr: def link(self, dst: PathExt): raise NotImplementedError + def platform(self) -> Callable[[object], PathExt]: + raise NotImplementedError + def mix_shutil_path(concrete: Type[PurePosixPath], + config_path: Callable[[str], str], shutil, os_link) -> Type[PathExt]: class PathWithShUtil(concrete, PathExt): # type: ignore - def __add__(self, suffix: str) -> PathWithShUtil: + def __add__(self, suffix: str) -> PathExt: return self.with_name(self.name + suffix) def copy(self, target: PathExt): @@ -98,6 +104,11 @@ def readlink(self): def link(self, dst: PathExt): os_link(str(self), str(dst)) + def platform(self) -> Callable[[object], PathExt]: + allfiles = self / '/' # KLUDGE + def get(key): + return allfiles / config_path(key) + return PathWithShUtil @@ -265,7 +276,8 @@ def walk_files(path: PathExt) -> Iterator[PathExt]: yield path -def walk(path: PathExt): +def walk(path: PathExt) -> Iterator[Tuple[PathExt, List[PathExt], + List[PathExt]]]: def is_dir(p): return p.is_dir() if path.is_dir(): From aa5281ccc0f0032bdcb0ae1ec49e18fb7de5b006 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sun, 18 Sep 2016 16:19:59 -0500 Subject: [PATCH 14/28] src.repository: ocap discipline, static types - static types rather than **kwargs for op_distfile etc. - NamedTuple rather than dict for deferred package info - types for version (sometimes a class), checksum (always str) - class AnyVersion reifies type common to SimpleVersion, FullVersion - master_sites, patches are Set[str], not List[str] - move `build_depends = set()` to else branch to help mypy - clarify builder.Access name - prune more resource_directory dead code --- src/builder.py | 5 +- src/distfile.py | 11 ++- src/package.py | 8 ++- src/repository.py | 175 +++++++++++++++++++++++++++------------------- src/version.py | 8 ++- 5 files changed, 122 insertions(+), 85 deletions(-) diff --git a/src/builder.py b/src/builder.py index 8c194ad..6e89723 100644 --- a/src/builder.py +++ b/src/builder.py @@ -11,6 +11,7 @@ from . import util from .distfile import Distfile from .util import PathExt +from .version import AnyVersion log = logging.getLogger(__name__) @@ -39,7 +40,7 @@ def __exit__(self, type, value, traceback) -> None: self._filename, self._subprocess) -Access = NamedTuple('FH_Access', [ +Access = NamedTuple('B_Access', [ ('subprocess', util.RunCommand), ('chdir', Callable[[PathExt], None]), ('getenv', Callable[[str], str])]) @@ -187,7 +188,7 @@ def unhardcode_paths(self): class BuildHandle: - def __init__(self, builder: Builder, name: str, version: str, + def __init__(self, builder: Builder, name: str, version: AnyVersion, distfiles: Dict[str, Distfile], io: Access) -> None: self._builder = builder diff --git a/src/distfile.py b/src/distfile.py index 5b47b4e..00bb258 100644 --- a/src/distfile.py +++ b/src/distfile.py @@ -3,11 +3,10 @@ # This file is distributed under a 2-clause BSD license. # See the LICENSE file for details. 
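The "ocap discipline" named in PATCH 14 above means that ambient authority (subprocess, randomness, clock, environment) reaches an object only through its constructor, bundled into an explicit record such as the Access NamedTuples being introduced here. A minimal standalone sketch of that pattern, using simplified names rather than the project's actual Access or Distfile classes:

    from random import Random
    from typing import Callable, List, NamedTuple

    # Simplified capability record in the spirit of the Access NamedTuples
    # above: an object gets only the authority it is explicitly handed.
    IO = NamedTuple('IO', [
        ('random', Random),
        ('getenv', Callable[[str], str]),
    ])


    class MirrorPicker:
        def __init__(self, io: IO) -> None:
            self._io = io  # no module-level os/random access

        def pick(self, mirrors: List[str]) -> str:
            # Uses the injected randomness, not the global random module.
            return self._io.random.choice(sorted(mirrors))


    io = IO(random=Random(0), getenv=lambda key: '')
    print(MirrorPicker(io).pick(['http://b.example/', 'http://a.example/']))

With this shape, a test can pass a seeded Random or a stub getenv without monkey-patching module globals.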
-from hashlib import Hash from pathlib import PurePosixPath from random import Random as RandomT from shutil import copyfileobj -from typing import Any, NamedTuple, cast +from typing import Any, AbstractSet, NamedTuple, Set, cast from urllib.error import URLError import logging @@ -24,10 +23,10 @@ class Distfile: ('subprocess', util.RunCommand), ('random', RandomT)]) - def __init__(self, distdir: PathExt, io: Access, name: str, checksum: Hash, - master_sites: List[str], - patches: List[PathExt], - unsafe_string_sources: List[str]) -> None: + def __init__(self, distdir: PathExt, io: Access, name: str, checksum: str, + master_sites: Set[str], + patches: Set[PathExt], + unsafe_string_sources: AbstractSet[str]) -> None: for patch in patches: if not patch.exists(): raise Exception('Patch %s does not exist' % patch) diff --git a/src/package.py b/src/package.py index bcf32eb..a9ed37a 100644 --- a/src/package.py +++ b/src/package.py @@ -3,7 +3,7 @@ # This file is distributed under a 2-clause BSD license. # See the LICENSE file for details. -from typing import Callable, Dict, Optional, Set, cast +from typing import Callable, Dict, NamedTuple, Optional, Set, cast import logging from . import config @@ -12,6 +12,7 @@ from .builder import Access from .distfile import Distfile from .util import PathExt +from .version import AnyVersion log = logging.getLogger(__name__) @@ -21,7 +22,8 @@ class HostPackage: def __init__(self, install_directory: PathExt, io: Access, - name: str, version: str, homepage :str, maintainer: str, + name: str, version: AnyVersion, + homepage :str, maintainer: str, build_depends: Set[HostPackage], lib_depends: Set[HostPackage], distfiles: Dict[str, Distfile], @@ -86,7 +88,7 @@ class TargetPackage: def __init__(self, install_directory: PathExt, io: Access, - arch: str, name: str, version: str, homepage: str, + arch: str, name: str, version: AnyVersion, homepage: str, maintainer: str, host_packages: Dict[str, HostPackage], lib_depends: Set[TargetPackage], diff --git a/src/repository.py b/src/repository.py index 8bdae71..09aeabf 100644 --- a/src/repository.py +++ b/src/repository.py @@ -3,91 +3,114 @@ # This file is distributed under a 2-clause BSD license. # See the LICENSE file for details. -import os -import random +from typing import (AbstractSet, Callable, Dict, Iterable, NamedTuple, + Optional, Tuple) from . 
import config +from .builder import BuildHandle, Access as BuildAccess from .distfile import Distfile from .package import HostPackage, TargetPackage -from .version import SimpleVersion +from .util import PathExt +from .version import AnyVersion, SimpleVersion + +PackageInfo = NamedTuple('PackageInfo', [ + ('name', str), + ('version', str), + ('build_cmd', Callable[[BuildHandle], None]), + ('build_depends', Optional[Set[str]]), + ('lib_depends', Optional[Set[str]]), + ('meta', Dict[str, str])]) class Repository: - def __init__(self, install_directory): + def __init__(self, install_directory: PathExt, + io_d: Distfile.Access, io_b: BuildAccess) -> None: self._install_directory = install_directory + self._io_d = io_d + self._io_b = io_b - self._distfiles = {} - self._host_packages = {} - self._target_packages = {} + self._distfiles = {} # type: Dict[str, Distfile] + self._host_packages = {} # type: Dict[str, HostPackage] + self._target_packages = {} # type: Dict[Tuple[str, str], TargetPackage] - self._deferred_host_packages = {} - self._deferred_target_packages = {} + self._deferred_host_packages = {} # type: Dict[str, PackageInfo] + self._deferred_target_packages = {} # type: Dict[Tuple[str, str], PackageInfo] - def add_build_file(self, path, distdir): - def op_build_autoconf_automake(ctx): + def add_build_file(self, path: PathExt, distdir: PathExt) -> None: + def op_build_autoconf_automake(ctx: BuildHandle) -> None: build = ctx.extract().gnu_configure() build.make() build.make_install().install() - def op_distfile(**kwargs): + def op_distfile(name: str, + checksum: str, + master_sites: Set[str], + patches: Optional[AbstractSet[str]]=None, + unsafe_string_sources: Optional[AbstractSet[str]]=None) -> None: # Determine canonical name by stripping the file extension. - distfile = kwargs - name = distfile['name'] for ext in { '.tar.bz2', '.tar.gz', '.tar.lzma', '.tar.xz', '.tgz', '.zip', }: - if distfile['name'].endswith(ext): - name = distfile['name'][:-len(ext)] + if name.endswith(ext): + name = name[:-len(ext)] break # Automatically add patches if none are given. - dirname = os.path.dirname(path) - if 'patches' not in distfile: - distfile['patches'] = (name[6:] - for name in os.listdir(dirname) - if name.startswith('patch-')) - if 'unsafe_string_sources' not in distfile: - distfile['unsafe_string_sources'] = frozenset() + dirname = path.parent + if patches is None: + patches = {p.name[6:] + for p in dirname.iterdir() + if p.name.startswith('patch-')} + if unsafe_string_sources is None: + unsafe_string_sources = frozenset() # Turn patch filenames into full paths. 
- distfile['patches'] = {os.path.join(dirname, 'patch-' + patch) - for patch in distfile['patches']} + patch_paths = {dirname.pathjoin('patch-' + patch) + for patch in patches} if name in self._distfiles: raise Exception('%s is redeclaring distfile %s' % (path, name)) self._distfiles[name] = Distfile( - distdir=distdir, - **distfile + distdir=distdir, io=self._io_d, + name=name, checksum=checksum, patches=patch_paths, + unsafe_string_sources=unsafe_string_sources, + master_sites=master_sites ) - def op_host_package(**kwargs): - package = kwargs - package['resource_directory'] = os.path.dirname(path) - name = package['name'] + def op_host_package(name: str, + version: str, + build_cmd: Callable[[BuildHandle], None], + build_depends: Optional[Set[str]]=None, + lib_depends: Optional[Set[str]]=None, + **meta) -> None: if name in self._deferred_host_packages: raise Exception('%s is redeclaring packages %s' % (path, name)) - self._deferred_host_packages[name] = package - - def op_package(**kwargs): - package = kwargs - package['resource_directory'] = os.path.dirname(path) - name = package['name'] + self._deferred_host_packages[name] = PackageInfo( + name, version, build_cmd, build_depends, lib_depends, meta) + + def op_package(name: str, + version: str, + build_cmd: Callable[[BuildHandle], None], + build_depends: Optional[Set[str]]=None, + lib_depends: Optional[Set[str]]=None, + **meta) -> None: for arch in config.ARCHITECTURES: if (name, arch) in self._deferred_target_packages: raise Exception( '%s is redeclaring package %s/%s' % (path, arch, name)) - self._deferred_target_packages[(name, arch)] = package + self._deferred_target_packages[(name, arch)] = PackageInfo( + name, version, build_cmd, build_depends, lib_depends, meta) - def op_sites_gnu(suffix): + def op_sites_gnu(suffix: str) -> Set[str]: return {fmt + suffix + '/' for fmt in { 'http://ftp.gnu.org/gnu/', 'http://ftp.nluug.nl/gnu/', }} - def op_sites_sourceforge(suffix): + def op_sites_sourceforge(suffix: str) -> Set[str]: return {fmt + suffix + '/' for fmt in { 'http://downloads.sourceforge.net/project/', 'http://freefr.dl.sourceforge.net/project/', @@ -115,70 +138,78 @@ def op_sites_sourceforge(suffix): 'sites_sourceforge': op_sites_sourceforge, } - with open(path, 'r') as f: + with path.open('r') as f: exec(f.read(), identifiers, identifiers) - def get_distfiles(self): + def get_distfiles(self) -> Iterable[Distfile]: return self._distfiles.values() - def get_target_packages(self): + def get_target_packages(self) -> Dict[Tuple[str, str], TargetPackage]: # Create host packages that haven't been instantiated yet. # This implicitly checks for dependency loops. 
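For context on the op_* functions above: add_build_file() exec()s each BUILD file with only these operations exposed through the identifiers mapping, so a BUILD file is plain Python restricted to that vocabulary. A small self-contained sketch of the same exec()-based loading, with a simplified stand-in op and BUILD text that are not the project's real ones:

    from typing import Any, Dict, Tuple

    declared = {}  # type: Dict[str, Tuple[str, Dict[str, Any]]]


    def op_package(name: str, version: str, **meta: Any) -> None:
        # Record the declaration; real instantiation happens in a later pass.
        declared[name] = (version, dict(meta))


    build_file_text = "package(name='example', version='1.0')\n"
    identifiers = {'package': op_package}
    # Same shape as add_build_file() above: the BUILD code runs with only
    # the exposed operations in scope.
    exec(build_file_text, identifiers, identifiers)
    assert declared['example'] == ('1.0', {})

The real identifiers table maps more names than are visible in this hunk, but the loading mechanism is the same.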
- def get_host_package(name): + def get_host_package(name: str) -> HostPackage: if name in self._deferred_host_packages: - package = dict(self._deferred_host_packages.pop(name)) + package = self._deferred_host_packages.pop(name) if name in self._host_packages: raise Exception('%s is declared multiple times' % name) - build_depends = set() - if 'build_depends' in package: + if package.build_depends is not None: build_depends = {get_host_package(dep) - for dep in package['build_depends']} - del package['build_depends'] - lib_depends = set() - if 'lib_depends' in package: + for dep in package.build_depends} + else: + build_depends = set() + if package.lib_depends: lib_depends = {get_host_package(dep) - for dep in package['lib_depends']} - del package['lib_depends'] - package['version'] = SimpleVersion(package['version']) + for dep in package.lib_depends} + else: + lib_depends = set() + version = SimpleVersion(package.version) self._host_packages[name] = HostPackage( - install_directory=os.path.join( - self._install_directory, - 'host', + install_directory=( + self._install_directory / + 'host' / name), + io=self._io_b, distfiles=self._distfiles, build_depends=build_depends, lib_depends=lib_depends, - **package) + name=package.name, version=version, + build_cmd=package.build_cmd, + homepage=package.meta.get('homepage'), + maintainer=package.meta.get('maintainer')) return self._host_packages[name] while self._deferred_host_packages: + random = self._io_d.random get_host_package( random.sample( self._deferred_host_packages.keys(), 1)[0]) # Create target packages that haven't been instantiated yet. - def get_target_package(name, arch): + def get_target_package(name: str, arch: str) -> TargetPackage: if (name, arch) in self._deferred_target_packages: - package = dict( - self._deferred_target_packages.pop((name, arch))) + package = self._deferred_target_packages.pop((name, arch)) if (name, arch) in self._target_packages: raise Exception('%s is declared multiple times' % name) - lib_depends = set() if 'lib_depends' in package: lib_depends = { get_target_package( - dep, arch) for dep in package['lib_depends']} - del package['lib_depends'] - package['version'] = SimpleVersion(package['version']) + dep, arch) for dep in package.lib_depends} + else: + lib_depends = set() + version = SimpleVersion(package.version) self._target_packages[(name, arch)] = TargetPackage( - install_directory=os.path.join( - self._install_directory, arch, name), + install_directory=( + self._install_directory / arch / name), + io=self._io_b, arch=arch, distfiles=self._distfiles, host_packages=self._host_packages, lib_depends=lib_depends, - **package) + name=package.name, version=version, + build_cmd=package.build_cmd, + homepage=package.meta.get('homepage'), + maintainer=package.meta.get('maintainer')) return self._target_packages[(name, arch)] while self._deferred_target_packages: @@ -192,8 +223,9 @@ def get_target_package(name, arch): packages = self._target_packages.copy() for arch in config.ARCHITECTURES: packages[('everything', arch)] = TargetPackage( - install_directory=os.path.join(self._install_directory, arch, - 'everything'), + install_directory=(self._install_directory / arch / + 'everything'), + io=self._io_b, arch=arch, name='everything', version=SimpleVersion('1.0'), @@ -206,6 +238,5 @@ def get_target_package(name, arch): if key[1] == arch }, build_cmd=None, - distfiles={}, - resource_directory=None) + distfiles={}) return packages diff --git a/src/version.py b/src/version.py index 92fafb6..6d92d1c 100644 --- 
a/src/version.py
+++ b/src/version.py
@@ -4,7 +4,11 @@
 # See the LICENSE file for details.
 
 
-class SimpleVersion:
+class AnyVersion:
+    pass
+
+
+class SimpleVersion(AnyVersion):
 
     def __init__(self, version):
         # Turn the numbers into a list of integer values.
@@ -24,7 +28,7 @@ def __str__(self):
         return '.'.join(str(part) for part in self._numbers)
 
 
-class FullVersion:
+class FullVersion(AnyVersion):
 
     def __init__(self, epoch=0, version=SimpleVersion('0'), revision=1):
         self._epoch = epoch

From 32160de5a782f3a2aff3a8728fdfbd88dbd52d69 Mon Sep 17 00:00:00 2001
From: Dan Connolly
Date: Sun, 18 Sep 2016 18:07:49 -0500
Subject: [PATCH 15/28] add access to /usr and /usr/local to platform

---
 src/config.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/config.py b/src/config.py
index 2fe2d3c..64abe10 100644
--- a/src/config.py
+++ b/src/config.py
@@ -41,8 +41,12 @@
 
 PERL = object()
 RANDOM = object()
+USR = object()
+USR_LOCAL = object()
 
 Default = {
+    USR: '/usr',
+    USR_LOCAL: '/usr/local',
     DIR_BUILDROOT: '/usr/obj/cloudabi-ports',
     HOST_CC: '/usr/bin/cc',
     HOST_CXX: '/usr/bin/cc++',

From 78fa34976ebe9be7e39ab06cee3d00188cf49268 Mon Sep 17 00:00:00 2001
From: Dan Connolly
Date: Sun, 18 Sep 2016 18:08:19 -0500
Subject: [PATCH 16/28] src.catalog: ocap discipline, static types (WIP)

---
 src/catalog.py | 351 +++++++++++++++++++++++++------------------------
 1 file changed, 180 insertions(+), 171 deletions(-)

diff --git a/src/catalog.py b/src/catalog.py
index 2a6118a..1fb037c 100644
--- a/src/catalog.py
+++ b/src/catalog.py
@@ -3,36 +3,42 @@
 # This file is distributed under a 2-clause BSD license.
 # See the LICENSE file for details.
 
+from subprocess import PIPE
+from time import strftime, struct_time
+from typing import Callable, Optional, Set, Tuple
 import base64
 import collections
 import hashlib
 import logging
 import lzma
 import math
-import os
 import re
-import shutil
 import stat
-import subprocess
-import time
 
 from . import config
 from . import rpm
 from . import util
-from .version import FullVersion, SimpleVersion
+from .package import TargetPackage as TPkg
+from .util import PathExt
+from .version import FullVersion
 
 log = logging.getLogger(__name__)
 
 
+_ClockFn = Callable[[], struct_time]
+
+
 class Catalog:
 
-    def __init__(self, old_path, new_path):
+    def __init__(self, old_path: PathExt, new_path: PathExt,
+                 subprocess: util.RunCommand) -> None:
         self._old_path = old_path
         self._new_path = new_path
-        self._packages = set()
+        self._packages = set()  # type: Set[Tuple[str, FullVersion]]
+        self._subprocess = subprocess
 
     @staticmethod
-    def _get_suggested_mode(path):
-        mode = os.lstat(path).st_mode
+    def _get_suggested_mode(path: PathExt) -> int:
+        mode = path.lstat().st_mode
         if stat.S_ISLNK(mode):
             # Symbolic links.
return 0o777 @@ -44,36 +50,40 @@ def _get_suggested_mode(path): return 0o444 @staticmethod - def _sanitize_permissions(directory, directory_mode=0o555): - for root, dirs, files in os.walk(directory): - util.lchmod(root, directory_mode) + def _sanitize_permissions(directory: PathExt, directory_mode=0o555) -> None: + for root, dirs, files in util.walk(directory): + root.lchmod(directory_mode) for filename in files: - path = os.path.join(root, filename) - util.lchmod(path, Catalog._get_suggested_mode(path)) + path = root / filename + path.lchmod(Catalog._get_suggested_mode(path)) - @staticmethod - def _run_tar(args): + def _run_tar(self, args: List[str]) -> None: + subprocess = self._subprocess + platform = self._old_path.platform() subprocess.check_call([ - os.path.join(config.DIR_BUILDROOT, 'bin/bsdtar') + str(platform(config.DIR_BUILDROOT) / 'bin/bsdtar') ] + args) - def insert(self, package, version, source): - target = os.path.join( - self._new_path, self._get_filename(package, version)) + def insert(self, package, version, source) -> None: + target = ( + self._new_path / self._get_filename(package, version)) util.make_dir(self._new_path) util.remove(target) - os.link(source, target) + source.link(target) self._packages.add((package, version)) - def lookup_at_version(self, package, version): + def lookup_at_version(self, package: TPkg, version: FullVersion) -> Optional[PathExt]: if self._old_path: - path = os.path.join( - self._old_path, + path = ( + self._old_path / self._get_filename(package, version)) - if os.path.exists(path): + if path.exists(): return path return None + def _get_filename(self, package: TPkg, version: FullVersion) -> str: + raise NotImplementedError('subclass must implement') + class DebianCatalog(Catalog): @@ -85,31 +95,34 @@ class DebianCatalog(Catalog): 's390x', 'sparc' } - def __init__(self, old_path, new_path): - super(DebianCatalog, self).__init__(old_path, new_path) + def __init__(self, old_path: PathExt, new_path: PathExt, + subprocess: util.RunCommand, + gmtime: _ClockFn) -> None: + super(DebianCatalog, self).__init__(old_path, new_path, subprocess) + self._gmtime = gmtime # Scan the existing directory hierarchy to find the latest # version of all of the packages. We need to know this in order # to determine the Epoch and revision number for any new # packages we're going to build. - self._existing = collections.defaultdict(FullVersion) + self._existing = collections.defaultdict(FullVersion) # type: Dict[str, FullVersion] if old_path: - for root, dirs, files in os.walk(old_path): - for filename in files: - parts = filename.split('_') + for root, dirs, files in util.walk(old_path): + for file in files: + parts = file.name.split('_') if len(parts) == 3 and parts[2] == 'all.deb': name = parts[0] version = FullVersion.parse_debian(parts[1]) if self._existing[name] < version: self._existing[name] = version - @staticmethod - def _get_filename(package, version): + def _get_filename(self, package: TPkg, version: FullVersion) -> str: return '%s_%s_all.deb' % ( package.get_debian_name(), version.get_debian_version()) @staticmethod - def _get_control_snippet(package, version, installed_size=None): + def _get_control_snippet(package: TPkg, version: FullVersion, + installed_size=None) -> str: """Returns a string suitable for writing to a .deb control file. 
For the fields refer to the Debian Policy Manual @@ -140,51 +153,51 @@ def _get_control_snippet(package, version, installed_size=None): dep.get_debian_name() for dep in lib_depends)) return snippet - def finish(self, private_key): + def finish(self, private_key: str) -> None: # Create package index. - def write_entry(f, package, version): + def write_entry(f, packageTPkg, version: FullVersion): f.write(self._get_control_snippet(package, version)) filename = self._get_filename(package, version) - path = os.path.join(self._new_path, filename) + path = self._new_path / filename f.write( 'Filename: %s\n' 'Size: %u\n' 'SHA256: %s\n' % ( filename, - os.path.getsize(path), + path.stat().st_size, util.sha256(path).hexdigest(), )) f.write('\n') - index = os.path.join(self._new_path, 'Packages') - with open(index, 'wt') as f, lzma.open(index + '.xz', 'wt') as f_xz: + index = self._new_path / 'Packages' + with index.open('wt') as f, lzma.open(index + '.xz', 'wt') as f_xz: for package, version in self._packages: write_entry(f, package, version) write_entry(f_xz, package, version) # Link the index into the per-architecture directory. + (subprocess, gmtime) = self._subprocess, self._gmtime for arch in self._architectures: - index_arch = os.path.join( - self._new_path, + index_arch = self._new_path.pathjoin( 'dists/cloudabi/cloudabi/binary-%s/Packages' % arch) util.make_parent_dir(index_arch) - os.link(index, index_arch) - os.link(index + '.xz', index_arch + '.xz') + index.link(index_arch) + (index + '.xz').link(index_arch + '.xz') checksum = util.sha256(index).hexdigest() checksum_xz = util.sha256(index + '.xz').hexdigest() - size = os.path.getsize(index) - size_xz = os.path.getsize(index + '.xz') - os.unlink(index) - os.unlink(index + '.xz') + size = index.stat().st_size + size_xz = (index + '.xz').stat().st_size + index.unlink() + (index + '.xz').unlink() # Create the InRelease file. - with open( - os.path.join(self._new_path, 'dists/cloudabi/InRelease'), 'w' + with ( + (self._new_path / 'dists/cloudabi/InRelease').open('w') ) as f, subprocess.Popen([ 'gpg', '--local-user', private_key, '--armor', '--sign', '--clearsign', '--digest-algo', 'SHA256', - ], stdin=subprocess.PIPE, stdout=f) as proc: - def append(text): + ], stdin=PIPE, stdout=f) as proc: + def append(text: str): proc.stdin.write(bytes(text, encoding='ASCII')) append( 'Suite: cloudabi\n' @@ -193,7 +206,7 @@ def append(text): 'Date: %s\n' 'SHA256:\n' % ( ' '.join(sorted(self._architectures)), - time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime()))) + strftime("%a, %d %b %Y %H:%M:%S UTC", gmtime()))) for arch in sorted(self._architectures): append(' %s %d cloudabi/binary-%s/Packages\n' % (checksum, size, arch)) @@ -203,99 +216,103 @@ def append(text): def lookup_latest_version(self, package): return self._existing[package.get_debian_name()] - def package(self, package, version): + def package(self, package: TPkg, version: FullVersion) -> PathExt: + subprocess = self._subprocess package.build() package.initialize_buildroot({'libarchive', 'llvm'}) log.info('PKG %s', self._get_filename(package, version)) - rootdir = config.DIR_BUILDROOT - debian_binary = os.path.join(rootdir, 'debian-binary') - controldir = os.path.join(rootdir, 'control') - datadir = os.path.join(rootdir, 'data') + platform = self._old_path.platform() + rootdir = platform(config.DIR_BUILDROOT) + debian_binary = rootdir / 'debian-binary' + controldir = rootdir / 'control' + datadir = rootdir / 'data' # Create 'debian-binary' file. 
- with open(debian_binary, 'w') as f: + with debian_binary.open('w') as f: f.write('2.0\n') - def tar(directory): + def tar(directory: PathExt): self._sanitize_permissions(directory) self._run_tar([ - '-cJf', directory + '.tar.xz', - '-C', directory, + '-cJf', str(directory + '.tar.xz'), + '-C', str(directory), '.', ]) # Create 'data.tar.xz' tarball that contains the files that need # to be installed by the package. - prefix = os.path.join('/usr', package.get_arch()) + prefix = platform(config.USR) / package.get_arch() util.make_dir(datadir) - package.extract(os.path.join(datadir, prefix[1:]), prefix) + package.extract(datadir / prefix.relative_to(prefix.root), prefix) tar(datadir) # Create 'control.tar.xz' tarball that contains the control files. util.make_dir(controldir) datadir_files = sorted(util.walk_files(datadir)) - datadir_size = sum(os.path.getsize(fpath) for fpath in datadir_files) - with open(os.path.join(controldir, 'control'), 'w') as f: + datadir_size = sum(fpath.stat().st_size for fpath in datadir_files) + with (controldir / 'control').open('w') as f: f.write(self._get_control_snippet(package, version, datadir_size)) - with open(os.path.join(controldir, 'md5sums'), 'w') as f: + with (controldir / 'md5sums').open('w') as f: f.writelines('%s %s\n' % (util.md5(fpath).hexdigest(), - os.path.relpath(fpath, datadir)) + fpath.relative_to(datadir)) for fpath in datadir_files) tar(controldir) - path = os.path.join(rootdir, 'output.txz') - subprocess.check_call([ - os.path.join(rootdir, 'bin/llvm-ar'), 'rc', path, + path = rootdir / 'output.txz' + subprocess.check_call([str(arg) for arg in [ + rootdir / 'bin/llvm-ar', 'rc', path, debian_binary, controldir + '.tar.xz', datadir + '.tar.xz', - ]) + ]]) return path class FreeBSDCatalog(Catalog): - def __init__(self, old_path, new_path): - super(FreeBSDCatalog, self).__init__(old_path, new_path) + def __init__(self, old_path: PathExt, new_path: PathExt, + subprocess: util.RunCommand) -> None: + super(FreeBSDCatalog, self).__init__(old_path, new_path, subprocess) # Scan the existing directory hierarchy to find the latest # version of all of the packages. We need to know this in order # to determine the Epoch and revision number for any new # packages we're going to build. - self._existing = collections.defaultdict(FullVersion) + self._existing = collections.defaultdict(FullVersion) # type: Dict[str, FullVersion] if old_path: - for root, dirs, files in os.walk(old_path): - for filename in files: - parts = filename.rsplit('-', 1) + for root, dirs, files in util.walk(old_path): + for file in files: + parts = file.name.rsplit('-', 1) if len(parts) == 2 and parts[1].endswith('.txz'): name = parts[0] version = FullVersion.parse_freebsd(parts[1][:-4]) if self._existing[name] < version: self._existing[name] = version - @staticmethod - def _get_filename(package, version): + def _get_filename(self, package: TPkg, version: FullVersion) -> str: return '%s-%s.txz' % (package.get_freebsd_name(), version.get_freebsd_version()) - def finish(self, private_key): + def finish(self, private_key: str): + subprocess = self._subprocess subprocess.check_call([ - 'pkg', 'repo', self._new_path, private_key, + 'pkg', 'repo', str(self._new_path), private_key, ]) # TODO(ed): Copy in some of the old files to keep clients happy. 
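A short sketch of why threading the command runner through the Catalog constructor (rather than calling the subprocess module directly) pays off: a test can substitute a recorder for the external 'pkg repo' invocation. RecordingRunner and SigningStep below are illustrative stand-ins, not project classes:

    from typing import Any, List


    class RecordingRunner:
        # Stand-in for a util.RunCommand-style object: it records the
        # command line instead of actually invoking 'pkg repo'.
        def __init__(self) -> None:
            self.calls = []  # type: List[List[str]]

        def check_call(self, args: List[str], **kwargs: Any) -> int:
            self.calls.append(list(args))
            return 0


    class SigningStep:
        # Mirrors the shape of Catalog.finish() above: the command runner
        # arrives through __init__ rather than a module-level import.
        def __init__(self, new_path: str, subprocess: Any) -> None:
            self._new_path = new_path
            self._subprocess = subprocess

        def finish(self, private_key: str) -> None:
            self._subprocess.check_call(
                ['pkg', 'repo', self._new_path, private_key])


    runner = RecordingRunner()
    SigningStep('/tmp/repo', runner).finish('repo.key')
    assert runner.calls == [['pkg', 'repo', '/tmp/repo', 'repo.key']]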
- def lookup_latest_version(self, package): + def lookup_latest_version(self, package: TPkg) -> FullVersion: return self._existing[package.get_freebsd_name()] - def package(self, package, version): + def package(self, package: TPkg, version: FullVersion) -> PathExt: package.build() package.initialize_buildroot({'libarchive'}) log.info('PKG %s', self._get_filename(package, version)) # The package needs to be installed in /usr/local/ on the # FreeBSD system. - installdir = os.path.join(config.DIR_BUILDROOT, 'install') + platform = self._old_path.platform() + installdir = platform(config.DIR_BUILDROOT) / 'install' arch = package.get_arch() - prefix = os.path.join('/usr/local', arch) + prefix = platform(config.USR_LOCAL) / arch package.extract(installdir, prefix) files = sorted(util.walk_files(installdir)) @@ -313,7 +330,7 @@ def package(self, package, version): '"flatsize":%(flatsize)d,' '"desc":"%(name)s for %(arch)s"' % { 'arch': arch, - 'flatsize': sum(os.lstat(path).st_size for path in files), + 'flatsize': sum(path.lstat().st_size for path in files), 'freebsd_name': package.get_freebsd_name(), 'homepage': package.get_homepage(), 'maintainer': package.get_maintainer(), @@ -326,21 +343,21 @@ def package(self, package, version): '\"%s\":{"origin":"devel/%s","version":"0"}' % (dep, dep) for dep in sorted(pkg.get_freebsd_name() for pkg in deps) ) - compact_manifest = os.path.join(config.DIR_BUILDROOT, + compact_manifest = platform(config.DIR_BUILDROOT).pathjoin( '+COMPACT_MANIFEST') - with open(compact_manifest, 'w') as f: + with compact_manifest.open('w') as f: f.write(base_manifest) f.write('}') # Create the fill manifest. if files: - manifest = os.path.join(config.DIR_BUILDROOT, '+MANIFEST') - with open(manifest, 'w') as f: + manifest = platform(config.DIR_BUILDROOT) / '+MANIFEST' + with manifest.open('w') as f: f.write(base_manifest) f.write(',"files":{') f.write(','.join( '"%s":"1$%s"' % ( - os.path.join(prefix, os.path.relpath(path, installdir)), + prefix.pathjoin(path.relative_to(installdir)), util.sha256(path).hexdigest()) for path in files)) f.write('}}') @@ -348,9 +365,9 @@ def package(self, package, version): manifest = compact_manifest # Create the package. - output = os.path.join(config.DIR_BUILDROOT, 'output.tar.xz') - listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + output = platform(config.DIR_BUILDROOT) / 'output.tar.xz' + listing = platform(config.DIR_BUILDROOT) / 'listing' + with listing.open('w') as f: # Leading files in tarball. f.write('#mtree\n') f.write( @@ -360,18 +377,18 @@ def package(self, package, version): '+MANIFEST type=file mode=0644 uname=root gname=wheel time=0 contents=%s\n' % manifest) for path in files: - fullpath = os.path.join(prefix, os.path.relpath(path, installdir)) - if os.path.islink(path): + fullpath = prefix.pathjoin(path.relative_to(installdir)) + if path.is_symlink(): # Symbolic links. f.write( '%s type=link mode=0777 uname=root gname=wheel time=0 link=%s\n' % - (fullpath, os.readlink(path))) + (fullpath, path.readlink())) else: # Regular files. 
f.write( '%s type=file mode=0%o uname=root gname=wheel time=0 contents=%s\n' % (fullpath, self._get_suggested_mode(path), path)) - self._run_tar(['-cJf', output, '-C', installdir, '@' + listing]) + self._run_tar(['-cJf', str(output), '-C', str(installdir), '@' + str(listing)]) return output @@ -379,27 +396,27 @@ class HomebrewCatalog(Catalog): _OSX_VERSIONS = {'el_capitan', 'mavericks', 'yosemite'} - def __init__(self, old_path, new_path, url): - super(HomebrewCatalog, self).__init__(old_path, new_path) + def __init__(self, old_path: PathExt, new_path: PathExt, url: str, + subprocess: util.RunCommand) -> None: + super(HomebrewCatalog, self).__init__(old_path, new_path, subprocess) self._url = url # Scan the existing directory hierarchy to find the latest # version of all of the packages. We need to know this in order # to determine the Epoch and revision number for any new # packages we're going to build. - self._existing = collections.defaultdict(FullVersion) + self._existing = collections.defaultdict(FullVersion) # type: Dict[str, FullVersion] if old_path: - for root, dirs, files in os.walk(old_path): - for filename in files: - parts = filename.split('|', 1) + for root, dirs, files in util.walk(old_path): + for file in files: + parts = file.name.split('|', 1) if len(parts) == 2: name = parts[0] version = FullVersion.parse_homebrew(parts[1]) if self._existing[name] < version: self._existing[name] = version - @staticmethod - def _get_filename(package, version): + def _get_filename(self, package: TPkg, version: FullVersion) -> str: return '%s|%s' % (package.get_homebrew_name(), version.get_homebrew_version()) @@ -413,10 +430,10 @@ def insert(self, package, version, source): # Create symbolic to the tarball for every supported version of # Mac OS X. filename = self._get_filename(package, version) - linksdir = os.path.join(self._new_path, 'links') + linksdir = self._new_path / 'links' util.make_dir(linksdir) for osx_version in self._OSX_VERSIONS: - link = os.path.join(linksdir, + link = linksdir.pathjoin( '%s-%s.%s.bottle.tar.gz' % ( package.get_homebrew_name(), version.get_homebrew_version(), osx_version)) @@ -426,8 +443,8 @@ def insert(self, package, version, source): # Create a formula. formulaedir = os.path.join(self._new_path, 'formulae') util.make_dir(formulaedir) - with open(os.path.join(formulaedir, - package.get_homebrew_name() + '.rb'), 'w') as f: + with formulaedir.pathjoin( + package.get_homebrew_name() + '.rb').open('w') as f: # Header. f.write("""class %(homebrew_class)s < Formula desc "%(name)s for %(arch)s" @@ -482,7 +499,7 @@ def package(self, package, version): # Add a placeholder install receipt file. Homebrew depends on it # being present with at least these fields. - with open(os.path.join(extractdir, 'INSTALL_RECEIPT.json'), 'w') as f: + with (extractdir / 'INSTALL_RECEIPT.json').join('w') as f: f.write('{"used_options":[],"unused_options":[]}\n') # Archive the results. @@ -497,11 +514,7 @@ def package(self, package, version): class NetBSDCatalog(Catalog): - def __init__(self, old_path, new_path): - super(NetBSDCatalog, self).__init__(old_path, new_path) - - @staticmethod - def _get_filename(package, version): + def _get_filename(self, package: TPkg, version: FullVersion) -> str: return '%s-%s.tgz' % (package.get_netbsd_name(), version.get_netbsd_version()) @@ -524,7 +537,7 @@ def package(self, package, version): # Package contents list. 
util.make_dir(installdir) - with open(os.path.join(installdir, '+CONTENTS'), 'w') as f: + with (installdir / '+CONTENTS').open('w') as f: f.write( '@cwd /usr/pkg/%s\n' '@name %s-%s\n' % ( @@ -537,9 +550,9 @@ def package(self, package, version): f.write(os.path.relpath(path, installdir) + '\n') # Package description. - with open(os.path.join(installdir, '+COMMENT'), 'w') as f: + with (installdir / '+COMMENT').open('w') as f: f.write('%s for %s\n' % (package.get_name(), package.get_arch())) - with open(os.path.join(installdir, '+DESC'), 'w') as f: + with (installdir / '+DESC').open('w') as f: f.write( '%(name)s for %(arch)s\n' '\n' @@ -556,7 +569,7 @@ def package(self, package, version): # system, meaning that these packages are currently only # installable on NetBSD/x86-64. Figure out a way we can create # packages that are installable on any system that uses pkgsrc. - with open(os.path.join(installdir, '+BUILD_INFO'), 'w') as f: + with (installdir / '+BUILD_INFO').open('w') as f: f.write( 'MACHINE_ARCH=x86_64\n' 'PKGTOOLS_VERSION=00000000\n' @@ -567,7 +580,7 @@ def package(self, package, version): self._sanitize_permissions(installdir) output = os.path.join(config.DIR_BUILDROOT, 'output.tar.xz') listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + with listing.open('w') as f: f.write('+CONTENTS\n+COMMENT\n+DESC\n+BUILD_INFO\n') for path in files: f.write(os.path.relpath(path, installdir) + '\n') @@ -577,11 +590,7 @@ def package(self, package, version): class OpenBSDCatalog(Catalog): - def __init__(self, old_path, new_path): - super(OpenBSDCatalog, self).__init__(old_path, new_path) - - @staticmethod - def _get_filename(package, version): + def _get_filename(self, package: TPkg, version: FullVersion) -> str: return '%s-%s.tgz' % (package.get_openbsd_name(), version.get_openbsd_version()) @@ -604,7 +613,7 @@ def package(self, package, version): # Package contents list. contents = os.path.join(config.DIR_BUILDROOT, 'contents') - with open(contents, 'w') as f: + with contents.open('w') as f: f.write( '@name %s-%s\n' '@cwd %s\n' % ( @@ -640,7 +649,7 @@ def package(self, package, version): # Package description. desc = os.path.join(config.DIR_BUILDROOT, 'desc') - with open(desc, 'w') as f: + with desc.open('w') as f: f.write( '%(name)s for %(arch)s\n' '\n' @@ -657,7 +666,7 @@ def package(self, package, version): output = os.path.join(config.DIR_BUILDROOT, 'output.tar.gz') listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + with listing.open('w') as f: # Leading files in tarball. 
f.write('#mtree\n') f.write( @@ -686,22 +695,24 @@ def package(self, package, version): class ArchLinuxCatalog(Catalog): - def __init__(self, old_path, new_path): - super(ArchLinuxCatalog, self).__init__(old_path, new_path) + def __init__(self, old_path: PathExt, new_path: PathExt, + subprocess: util.RunCommand, + chdir: Callable[[PathExt], None]) -> None: + super(ArchLinuxCatalog, self).__init__(old_path, new_path, subprocess) + self._chdir = chdir - self._existing = collections.defaultdict(FullVersion) + self._existing = collections.defaultdict(FullVersion) # type: Dict[str, FullVersion] if old_path: - for root, dirs, files in os.walk(old_path): - for filename in files: - parts = filename.rsplit('-', 3) + for root, dirs, files in util.walk(old_path): + for file in files: + parts = file.name.rsplit('-', 3) if len(parts) == 4 and parts[3] == 'any.pkg.tar.xz': name = parts[0] version = FullVersion.parse_archlinux(parts[1] + '-' + parts[2]) if self._existing[name] < version: self._existing[name] = version - @staticmethod - def _get_filename(package, version): + def _get_filename(self, package: TPkg, version: FullVersion) -> str: return '%s-%s-any.pkg.tar.xz' % (package.get_archlinux_name(), version.get_archlinux_version()) @@ -725,7 +736,7 @@ def package(self, package, version): util.make_dir(installdir) pkginfo = os.path.join(installdir, '.PKGINFO') - with open(pkginfo, 'w') as f: + with pkginfo.open('w') as f: f.write( 'pkgname = %(archlinux_name)s\n' 'pkgdesc = %(name)s for %(arch)s\n' @@ -745,14 +756,14 @@ def package(self, package, version): output = os.path.join(config.DIR_BUILDROOT, 'output.tar.xz') listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + with listing.open('w') as f: f.write('.PKGINFO\n') for path in files: f.write(os.path.relpath(path, installdir) + '\n') mtree = os.path.join(installdir, '.MTREE') - with open(listing, 'w') as f: + with listing.open('w') as f: f.write('#mtree\n') f.write( '.PKGINFO type=file mode=0644 uname=root gname=root time=0 contents=%s\n' % pkginfo) @@ -782,69 +793,71 @@ def package(self, package, version): return output def finish(self, private_key): + subprocess = self._subprocess for package, version in self._packages: package_file = self._get_filename(package, version) subprocess.check_call([ 'gpg', '--detach-sign', '--local-user', private_key, '--no-armor', '--digest-algo', 'SHA256', - os.path.join(self._new_path, package_file)]) - db_file = os.path.join(self._new_path, 'cloudabi-ports.db.tar.xz') - packages = [os.path.join(self._new_path, self._get_filename(*p)) for p in self._packages] + self._new_path / package_file]) + db_file = self._new_path / 'cloudabi-ports.db.tar.xz' + packages = [self._new_path / self._get_filename(*p) + for p in self._packages] # Ensure that repo-add as a valid working directory. 
- os.chdir('/') + self._chdir('/') subprocess.check_call(['repo-add', '-s', '-k', private_key, db_file] + packages) class CygwinCatalog(Catalog): - def __init__(self, old_path, new_path): - super(CygwinCatalog, self).__init__(old_path, new_path) + def __init__(self, old_path: PathExt, new_path: PathExt, + subprocess: util.RunCommand) -> None: + super(CygwinCatalog, self).__init__(old_path, new_path, subprocess) - self._existing = collections.defaultdict(FullVersion) + self._existing = collections.defaultdict(FullVersion) # type: Dict[str, FullVersion] if old_path: - for root, dirs, files in os.walk(old_path): - for filename in files: - if filename.endswith('.tar.xz'): - parts = filename[:-7].rsplit('-', 2) + for root, dirs, files in util.walk(old_path): + for file in files: + if file.name.endswith('.tar.xz'): + parts = file.name[:-7].rsplit('-', 2) if len(parts) == 3: name = parts[0] version = FullVersion.parse_cygwin(parts[1] + '-' + parts[2]) if self._existing[name] < version: self._existing[name] = version - @staticmethod - def _get_filename(package, version): + def _get_filename(self, package: TPkg, version: FullVersion) -> str: return '%s-%s.tar.xz' % (package.get_cygwin_name(), version.get_cygwin_version()) def lookup_latest_version(self, package): return self._existing[package.get_cygwin_name()] - def package(self, package, version): + def package(self, package: TPkg, version: FullVersion): package.build() package.initialize_buildroot({'libarchive'}) log.info('PKG %s', self._get_filename(package, version)) - installdir = os.path.join(config.DIR_BUILDROOT, 'install') + platform = self._old_path.platform() + installdir = platform(config.DIR_BUILDROOT) / 'install' arch = package.get_arch() - prefix = os.path.join('/usr', arch) - package.extract(os.path.join(installdir, prefix[1:]), prefix) - files = sorted(util.walk_files(installdir)) + prefix = platform(config.USR) / arch + package.extract(installdir / prefix.relative_to(prefix.root), prefix) util.make_dir(installdir) - output = os.path.join(config.DIR_BUILDROOT, 'output.tar.xz') + output = platform(config.DIR_BUILDROOT) / 'output.tar.xz' - self._run_tar(['-cJf', output, '-C', installdir, '.']) + self._run_tar(['-cJf', str(output), '-C', str(installdir), '.']) return output - def finish(self, private_key): + def finish(self, private_key: str): for cygwin_arch in ('x86', 'x86_64'): cygwin_arch_dir = os.path.join(self._new_path, cygwin_arch) util.make_dir(cygwin_arch_dir) setup_file = os.path.join(cygwin_arch_dir, 'setup.ini') - with open(setup_file, 'w') as f: + with setup_file.open('w') as f: f.write('release: cygwin\n') f.write('arch: %s\n' % cygwin_arch) f.write('setup-timestamp: %d\n' % int(time.time())) @@ -885,11 +898,7 @@ def finish(self, private_key): class RedHatCatalog(Catalog): - def __init__(self, old_path, new_path): - super(RedHatCatalog, self).__init__(old_path, new_path) - - @staticmethod - def _get_filename(package, version): + def _get_filename(self, package: TPkg, version: FullVersion) -> str: return '%s-%s.noarch.rpm' % (package.get_redhat_name(), version.get_redhat_version()) @@ -946,7 +955,7 @@ def package(self, package, version): # Create an xz compressed cpio payload containing all files. listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + with listing.open('w') as f: f.write('#mtree\n') for path in files: relpath = os.path.join(prefix, os.path.relpath(path, installdir)) @@ -1024,7 +1033,7 @@ def package(self, package, version): # Create the RPM file. 
output = os.path.join(config.DIR_BUILDROOT, 'output.rpm') - with open(output, 'wb') as f: + with output.open('wb') as f: # The lead. f.write(b'\xed\xab\xee\xdb\x03\x00\x00\x00\x00\x00') fullname = '%s-%s' % (name, version.get_redhat_version()) @@ -1040,7 +1049,7 @@ def package(self, package, version): f.write(header) # The payload. - with open(data, 'rb') as fin: + with data.open('rb') as fin: shutil.copyfileobj(fin, f) return output From 26d3597a8a8f1dbb5fd8e4d7eb2911baf2738f02 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 11:26:08 -0500 Subject: [PATCH 17/28] distfile: PEP8 style - two blank lines before class, def - 4 space indent - 79 character line limit --- src/distfile.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/src/distfile.py b/src/distfile.py index 05d359b..2fb7b05 100644 --- a/src/distfile.py +++ b/src/distfile.py @@ -15,6 +15,7 @@ log = logging.getLogger(__name__) + class Distfile: def __init__(self, distdir, name, checksum, master_sites, patches, @@ -46,7 +47,8 @@ def _apply_patch(patch, target): with patch.open('rb') as f: for l in f.readlines(): if l.startswith(b'--- '): - filename = str(l[4:-1].split(b'\t', 1)[0], encoding='ASCII') + filename = str(l[4:-1].split(b'\t', 1)[0], + encoding='ASCII') while True: if (target / filename).exists(): # Correct patchlevel determined. @@ -105,7 +107,8 @@ def _fetch(self): log.info('FETCH %s', url) try: util.make_parent_dir(self._pathname) - with util.unsafe_fetch(url) as fin, self._pathname.open('wb') as fout: + with util.unsafe_fetch(url) as fin, \ + self._pathname.open('wb') as fout: shutil.copyfileobj(fin, fout) except ConnectionResetError as e: log.warning(e) @@ -120,12 +123,12 @@ def extract(self, target): self._apply_patch(patch, target) # Add markers to sources that depend on unsafe string sources. for filename in self._unsafe_string_sources: - path = target / filename - with path.open('rb') as fin, (path + '.new').open('wb') as fout: - fout.write(bytes('#define _CLOUDLIBC_UNSAFE_STRING_FUNCTIONS\n', - encoding='ASCII')) - fout.write(fin.read()) - (path + '.new').rename(path) + path = target / filename + with path.open('rb') as fin, (path + '.new').open('wb') as fout: + fout.write(bytes('#define _CLOUDLIBC_UNSAFE_STRING_FUNCTIONS\n', + encoding='ASCII')) + fout.write(fin.read()) + (path + '.new').rename(path) return target def fixup_patches(self, tmpdir): From 1c8d9b0c0629adc7fd7fa4996411f113790f2cda Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 13:32:59 -0500 Subject: [PATCH 18/28] src.package: prune unused imports --- src/package.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/package.py b/src/package.py index 300b692..d6badb0 100644 --- a/src/package.py +++ b/src/package.py @@ -6,8 +6,6 @@ import logging import os import shutil -import stat -import subprocess from . import config from . 
import util From 3c60d521dd5471bb1e7b5c5e60ee5ba58c1efc59 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sat, 17 Sep 2016 13:33:24 -0500 Subject: [PATCH 19/28] package: style: 2 blank lines before class --- src/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/package.py b/src/package.py index d6badb0..e6ba140 100644 --- a/src/package.py +++ b/src/package.py @@ -13,6 +13,7 @@ log = logging.getLogger(__name__) + class HostPackage: def __init__(self, install_directory, name, version, homepage, From e40cb5b330889edca5dfc7e964327098e040a391 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sun, 18 Sep 2016 14:48:18 -0500 Subject: [PATCH 20/28] src/catalog: style tweaks; prune unused items - unused: SimpleVersion, lib_depends, files - two lines before class - spaces around / and after : - }); formatting --- src/catalog.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/catalog.py b/src/catalog.py index 1af675b..c705391 100644 --- a/src/catalog.py +++ b/src/catalog.py @@ -19,10 +19,11 @@ from . import config from . import rpm from . import util -from .version import FullVersion, SimpleVersion +from .version import FullVersion log = logging.getLogger(__name__) + class Catalog: def __init__(self, old_path, new_path): @@ -132,7 +133,7 @@ def _get_control_snippet(package, version, installed_size=None): # Optional, estimate in kB of disk space needed to install the package if installed_size is not None: - snippet += 'Installed-Size: %d\n' % math.ceil(installed_size/1024) + snippet += 'Installed-Size: %d\n' % math.ceil(installed_size / 1024) lib_depends = package.get_lib_depends() if lib_depends: @@ -684,6 +685,7 @@ def package(self, package, version): ]) return output + class ArchLinuxCatalog(Catalog): def __init__(self, old_path, new_path): @@ -739,7 +741,6 @@ def package(self, package, version): 'version': version.get_archlinux_version(), } ) - lib_depends = package.get_lib_depends() for dep in sorted(pkg.get_archlinux_name() for pkg in package.get_lib_depends()): f.write('depend = %s\n' % dep) @@ -829,7 +830,6 @@ def package(self, package, version): arch = package.get_arch() prefix = '/usr' / arch package.extract(installdir / prefix[1:], prefix) - files = sorted(util.walk_files(installdir)) util.make_dir(installdir) @@ -849,7 +849,7 @@ def finish(self, private_key): f.write('arch: %s\n' % cygwin_arch) f.write('setup-timestamp: %d\n' % int(time.time())) for package, version in sorted(self._packages, - key=lambda p:p[0].get_cygwin_name()): + key=lambda p: p[0].get_cygwin_name()): package_file_name = self._get_filename(package, version) package_file = self._new_path / package_file_name f.write( @@ -868,8 +868,7 @@ def finish(self, private_key): f.write('requires: %(deps)s\n' % { 'deps': ' '.join(sorted(pkg.get_cygwin_name() for pkg in package.get_lib_depends())) - } - ); + }) f.write( 'install: %(filename)s %(size)s %(sha512)s\n' % { 'size': package_file.lstat().st_size, From 12dc43c7c98e7d114ffc0327ad787a63960f98b3 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Sun, 18 Sep 2016 14:51:54 -0500 Subject: [PATCH 21/28] build_packages: use parens for long import --- build_packages.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/build_packages.py b/build_packages.py index 3626837..b576dda 100755 --- a/build_packages.py +++ b/build_packages.py @@ -10,7 +10,9 @@ from src import config from src import util -from src.catalog import ArchLinuxCatalog, CygwinCatalog, DebianCatalog, FreeBSDCatalog, HomebrewCatalog, NetBSDCatalog, 
OpenBSDCatalog, RedHatCatalog +from src.catalog import (ArchLinuxCatalog, CygwinCatalog, DebianCatalog, + FreeBSDCatalog, HomebrewCatalog, NetBSDCatalog, + OpenBSDCatalog, RedHatCatalog) from src.repository import Repository from src.version import FullVersion From ee53b0999d3da9bb8a51c98e66dcb3e6eb69d88b Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Tue, 20 Sep 2016 21:31:28 -0500 Subject: [PATCH 22/28] some static types for src.version --- src/version.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/version.py b/src/version.py index 6d92d1c..0bc6440 100644 --- a/src/version.py +++ b/src/version.py @@ -3,6 +3,8 @@ # This file is distributed under a 2-clause BSD license. # See the LICENSE file for details. +from typing import Optional as Opt + class AnyVersion: pass @@ -10,7 +12,7 @@ class AnyVersion: class SimpleVersion(AnyVersion): - def __init__(self, version): + def __init__(self, version: str) -> None: # Turn the numbers into a list of integer values. self._numbers = [int(part) for part in version.split('.')] @@ -30,7 +32,9 @@ def __str__(self): class FullVersion(AnyVersion): - def __init__(self, epoch=0, version=SimpleVersion('0'), revision=1): + def __init__(self, epoch: Opt[int]=0, + version: Opt[SimpleVersion]=SimpleVersion('0'), + revision: Opt[int]=1) -> None: self._epoch = epoch self._version = version self._revision = revision From a4c8c2e1d64f18919c952f00c46a914a9dd31250 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Tue, 20 Sep 2016 21:30:09 -0500 Subject: [PATCH 23/28] string-ize fwd refs to types to avoide NameError --- src/builder.py | 31 +++++++++++++++---------------- src/package.py | 12 ++++++------ src/repository.py | 2 +- src/util.py | 34 +++++++++++++++++----------------- 4 files changed, 39 insertions(+), 40 deletions(-) diff --git a/src/builder.py b/src/builder.py index 316831b..2aded1d 100644 --- a/src/builder.py +++ b/src/builder.py @@ -19,7 +19,7 @@ class DiffCreator: def __init__(self, source_directory: PathExt, - build_directory: BuildDirectory, + build_directory: 'BuildDirectory', filename: PathExt, subprocess: util.RunCommand) -> None: self._source_directory = source_directory self._build_directory = build_directory @@ -48,7 +48,7 @@ def __exit__(self, type, value, traceback) -> None: class FileHandle: - def __init__(self, builder: Builder, path: PathExt, io: Access) -> None: + def __init__(self, builder: 'Builder', path: PathExt, io: Access) -> None: self._builder = builder self._path = path self._io = io @@ -129,14 +129,14 @@ def diff(self, filename: PathExt) -> DiffCreator: return DiffCreator(self._path, self._builder._build_directory, filename, self._io.subprocess) - def host(self) -> FileHandle: + def host(self) -> 'FileHandle': builder = cast(TargetBuilder, self._builder) return FileHandle(builder._host_builder, self._path, self._io) - def rename(self, dst: FileHandle) -> None: + def rename(self, dst: 'FileHandle') -> None: self._path.rename(dst._path) - def cmake(self, args: List[str]=[]) -> FileHandle: + def cmake(self, args: List[str]=[]) -> 'FileHandle': builddir = self._builder._build_directory.get_new_directory() self._builder.cmake(builddir, self._path, args) return FileHandle(self._builder, builddir, self._io) @@ -169,7 +169,7 @@ def ninja_install(self): def open(self, mode: str) -> IO[Any]: return self._path.open(mode) - def path(self, path: str) -> FileHandle: + def path(self, path: str) -> 'FileHandle': return FileHandle(self._builder, self._path / path, self._io) def remove(self) -> None: @@ -188,7 
+188,7 @@ def unhardcode_paths(self): class BuildHandle: - def __init__(self, builder: Builder, name: str, version: AnyVersion, + def __init__(self, builder: 'Builder', name: str, version: AnyVersion, distfiles: Dict[str, Distfile], io: Access) -> None: self._builder = builder @@ -223,11 +223,11 @@ def endian() -> str: return 'little' def executable(self, objects: List[FileHandle]) -> FileHandle: - objs = sorted(obj._path for obj in objects) + objs = sorted(str(obj._path) for obj in objects) output = self._builder._build_directory.get_new_executable() log.info('LD %s', output) subprocess = self._io.subprocess - subprocess.check_call([self._builder.get_cc(), '-o', output] + objs) + subprocess.check_call([self._builder.get_cc(), '-o', str(output)] + objs) return FileHandle(self._builder, output, self._io) def extract(self, name='%(name)s-%(version)s'): @@ -272,19 +272,19 @@ def __init__(self, platform: Callable[[object], PathExt]) -> None: self._sequence_number = 0 self._builddir = platform(config.DIR_BUILDROOT) / 'build' - def get_new_archive(self): + def get_new_archive(self) -> PathExt: path = self._builddir.pathjoin('lib%d.a' % self._sequence_number) util.make_parent_dir(path) self._sequence_number += 1 return path - def get_new_directory(self): + def get_new_directory(self) -> PathExt: path = self._builddir.pathjoin(str(self._sequence_number)) util.make_dir(path) self._sequence_number += 1 return path - def get_new_executable(self): + def get_new_executable(self) -> PathExt: path = self._builddir.pathjoin('bin%d' % self._sequence_number) util.make_parent_dir(path) self._sequence_number += 1 @@ -293,7 +293,7 @@ def get_new_executable(self): class Builder: def __init__(self, build_directory: BuildDirectory, - install_directory: PathExt, io: Access) -> None: + install_directory: Optional[PathExt], io: Access) -> None: self._build_directory = build_directory self._install_directory = install_directory self._platform = install_directory.platform() @@ -357,8 +357,7 @@ def get_gnu_triple(self): triple = subprocess.check_output(config_guess) return str(triple, encoding='ASCII').strip() - @staticmethod - def get_prefix(): + def get_prefix(self) -> PathExt: return config.DIR_BUILDROOT def install(self, source: PathExt, target: PathExt) -> None: @@ -387,7 +386,7 @@ def run(self, cwd: PathExt, command: List[str]) -> None: 'CXX=' + str(self.get_cxx()), 'CFLAGS=' + ' '.join(self._cflags), 'CXXFLAGS=' + ' '.join(self._cflags), - 'LDFLAGS=-L' + self.get_prefix().pathjoin('lib'), + 'LDFLAGS=-L' + str(self.get_prefix().pathjoin('lib')), 'PATH=%s:%s' % (self.get_prefix().pathjoin('bin'), os.getenv('PATH')), ] + command) diff --git a/src/package.py b/src/package.py index ca04af5..0aecc68 100644 --- a/src/package.py +++ b/src/package.py @@ -23,8 +23,8 @@ def __init__(self, install_directory: PathExt, io: Access, name: str, version: AnyVersion, homepage :str, maintainer: str, - build_depends: Set[HostPackage], - lib_depends: Set[HostPackage], + build_depends: Set['HostPackage'], + lib_depends: Set['HostPackage'], distfiles: Dict[str, Distfile], build_cmd: Callable[[BuildHandle], None]) -> None: self._install_directory = install_directory @@ -74,7 +74,7 @@ def build(self) -> None: self._io), self._name, self._version, self._distfiles, self._io)) - def extract(self): + def extract(self) -> None: # Copy files literally. 
platform = self._install_directory.platform() for source_file, target_file in util.walk_files_concurrently( @@ -90,7 +90,7 @@ def __init__(self, install_directory: PathExt, arch: str, name: str, version: AnyVersion, homepage: str, maintainer: str, host_packages: Dict[str, HostPackage], - lib_depends: Set[TargetPackage], + lib_depends: Set['TargetPackage'], build_cmd: Optional[Callable[[BuildHandle], None]], distfiles: Dict[str, Distfile]) -> None: self._install_directory = install_directory @@ -185,7 +185,7 @@ def get_redhat_name(self): def get_homepage(self): return self._homepage - def get_lib_depends(self) -> Set[TargetPackage]: + def get_lib_depends(self) -> Set['TargetPackage']: return self._lib_depends def get_maintainer(self): @@ -198,7 +198,7 @@ def get_version(self): return self._version def initialize_buildroot(self, host_depends: Set[str], - lib_depends: Set[TargetPackage]=set()) -> None: + lib_depends: Set['TargetPackage']=set()) -> None: # Ensure that all dependencies have been built. host_deps = set() for dep_name in host_depends: diff --git a/src/repository.py b/src/repository.py index cce2649..f39a341 100644 --- a/src/repository.py +++ b/src/repository.py @@ -4,7 +4,7 @@ # See the LICENSE file for details. from typing import (AbstractSet, Callable, Dict, Iterable, NamedTuple, - Optional, Tuple) + Optional, Set, Tuple) from . import config diff --git a/src/util.py b/src/util.py index 25e4661..56375e1 100644 --- a/src/util.py +++ b/src/util.py @@ -33,48 +33,48 @@ def check_output(self, args: str, **kwargs) -> Any: class PathExt(PathT): # fix lack of parameter in PurePath type decl # ref https://github.com/python/typeshed/issues/553 - def pathjoin(self, *key: _SubPath) -> PathExt: # type: ignore + def pathjoin(self, *key: _SubPath) -> 'PathExt': raise NotImplementedError - def __truediv__(self, key: _SubPath) -> PathExt: # type: ignore + def __truediv__(self, key: _SubPath) -> 'PathExt': raise NotImplementedError - def iterdir(self) -> Iterator[PathExt]: # type: ignore + def iterdir(self) -> Iterator['PathExt']: # type: ignore raise NotImplementedError - def relative_to(self, *other: _SubPath) -> PathExt: # type: ignore + def relative_to(self, *other: _SubPath) -> 'PathExt': raise NotImplementedError - def resolve(self) -> PathExt: # type: ignore + def resolve(self) -> 'PathExt': raise NotImplementedError - def with_name(self, name: str) -> PathExt: # type: ignore + def with_name(self, name: str) -> 'PathExt': # type: ignore raise NotImplementedError parent = None # type: PathExt - def __add__(self, suffix: str) -> PathExt: + def __add__(self, suffix: str) -> 'PathExt': raise NotImplementedError - def copy(self, target: PathExt): + def copy(self, target: PathT): raise NotImplementedError - def copystat(self, target: PathExt): + def copystat(self, target: PathT): raise NotImplementedError - def copymode(self, target: PathExt): + def copymode(self, target: PathT): raise NotImplementedError - def rmtree(self): + def rmtree(self) -> None: raise NotImplementedError def readlink(self) -> AnyStr: raise NotImplementedError - def link(self, dst: PathExt): + def link(self, dst: PathT): raise NotImplementedError - def platform(self) -> Callable[[object], PathExt]: + def platform(self) -> Callable[[object], 'PathExt']: raise NotImplementedError @@ -235,7 +235,7 @@ def remove_and_make_dir(path: PathExt): make_dir(path) -def hash_file(path: PathExt, checksum: hashlib.Hash): +def hash_file(path: PathExt, checksum: 'hashlib.Hash'): if path.is_symlink(): checksum.update(bytes(path.readlink(), 
encoding='ASCII')) else: @@ -247,19 +247,19 @@ def hash_file(path: PathExt, checksum: hashlib.Hash): checksum.update(data) -def sha256(path: PathExt) -> hashlib.Hash: +def sha256(path: PathExt) -> 'hashlib.Hash': checksum = hashlib.sha256() hash_file(path, checksum) return checksum -def sha512(path: PathExt) -> hashlib.Hash: +def sha512(path: PathExt) -> 'hashlib.Hash': checksum = hashlib.sha512() hash_file(path, checksum) return checksum -def md5(path: PathExt) -> hashlib.Hash: +def md5(path: PathExt) -> 'hashlib.Hash': checksum = hashlib.md5() hash_file(path, checksum) return checksum From db99f02ab90c0cc287c609951a6a5de69963b174 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Tue, 20 Sep 2016 21:34:07 -0500 Subject: [PATCH 24/28] test_get_target_packages(), trivial case run with `py.test`, for example --- test_suite.py | 121 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 121 insertions(+) create mode 100644 test_suite.py diff --git a/test_suite.py b/test_suite.py new file mode 100644 index 0000000..ae068cf --- /dev/null +++ b/test_suite.py @@ -0,0 +1,121 @@ +from io import BytesIO, StringIO +from pathlib import PurePosixPath +from typing import AnyStr, BinaryIO, Dict, List, Optional +import logging + +from src import config +from src import util +from src.builder import Access as BuildAccess +from src.distfile import Distfile, RandomT +from src.repository import Repository + +logging.basicConfig(level=logging.INFO) +log = logging.getLogger(__name__) + + +def test_get_target_packages() -> None: + io = MockWorld(dict()) + + target_packages = get_target_packages(io.cwd(), io.io_d(), io.io_b()) + + assert len(target_packages) == len(config.ARCHITECTURES) + + an_arch = next(iter(config.ARCHITECTURES)) + assert ('everything', an_arch) in target_packages + assert str(target_packages[('everything', an_arch)]) == (an_arch + '-everything 1.0') + + +def get_target_packages(cwd, io_d, io_b): # TODO: move this to build_packages.py + repo = Repository(cwd / '_obj/install', io_d, io_b) + + for file in util.walk_files(cwd / 'packages'): + if file.name == 'BUILD': + repo.add_build_file(file, cwd / '_obj/distfiles') + return repo.get_target_packages() + + +def mock_random() -> RandomT: + from random import Random + return Random(1) + + +class _Writing(BytesIO): + def __init__(self, finalize): + BinaryIO.__init__(self) + self._finalize = finalize + + def __del__(self): + self._finalize(self.getvalue()) + + +class MockWorld(object): + class Path(util.PathExt): + State = Dict[PurePosixPath, Optional[bytes]] + + # Override Path.__new__() + def __new__(cls, _s, _p): + self = object.__new__(cls) + return self + + def __init__(self, state: State, ppp: PurePosixPath) -> None: + self._ppp = ppp + self._state = state + + def open(self, mode='r'): + if 'w' in mode: + if 'b' not in mode: + raise NotImplementedError(mode) + def done(content): + self._state[self._ppp] = content + return _Writing(done) + else: + try: + content = self._state[self._parts] + except KeyError: + raise IOError(self._parts) + return (BytesIO(content) if 'b' in mode + else StringIO(content)) + + def joinpath(self, *other): + return MockWorld.Path(self._state, self._ppp.joinpath(*other)) + + def __truediv__(self, *other): + return self.joinpath(*other) + + def is_dir(self): + return self._state.get(self._ppp, 0) is None + + def exists(self): + return self._ppp in self._state + + def __init__(self, init_fs: Path.State) -> None: + root = MockWorld.Path(init_fs, PurePosixPath('/')) + self._cwd = root / 'tmp' + self._environ = 
{} # type: Dict[str, str] + + def io_d(self) -> Distfile.Access: + return (self.urlopen, self.subprocess, mock_random()) + + def io_b(self) -> BuildAccess: + return (self.subprocess, self.chdir, self.getenv) + + def cwd(self) -> util.PathExt: + return self._cwd + + def getenv(self, key: str) -> str: + return self._environ[key] + + def chdir(self, path: util.PathExt) -> None: + self._cwd = path + + def urlopen(self) -> BinaryIO: + raise NotImplementedError + + + class RC(util.RunCommand): + pass + + subprocess = RC() + + +test_get_target_packages() From 444094a4c42273ae08c7c2e8a175f3b9505516e0 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Tue, 20 Sep 2016 23:26:25 -0500 Subject: [PATCH 25/28] walk fix: not is_dir() does not imply is_file() --- src/util.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/util.py b/src/util.py index 56375e1..6c8c72d 100644 --- a/src/util.py +++ b/src/util.py @@ -278,13 +278,11 @@ def walk_files(path: PathExt) -> Iterator[PathExt]: def walk(path: PathExt) -> Iterator[Tuple[PathExt, List[PathExt], List[PathExt]]]: - def is_dir(p): - return p.is_dir() if path.is_dir(): root = path dirs, files = tee(root.iterdir()) - dirs = list(filter(is_dir, dirs)) - files = list(filterfalse(is_dir, files)) + dirs = list(filter(lambda d: d.is_dir(), dirs)) + files = list(filter(lambda f: f.is_file(), files)) yield root, dirs, files for subdir in dirs: yield from walk(subdir) From b079ac78748c0178f1cfa06b7659200b97996f7d Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Tue, 20 Sep 2016 23:27:39 -0500 Subject: [PATCH 26/28] fix scope of `random` in get_starget_packages found through unit testing --- src/repository.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/repository.py b/src/repository.py index f39a341..3740da8 100644 --- a/src/repository.py +++ b/src/repository.py @@ -145,6 +145,8 @@ def get_distfiles(self) -> Iterable[Distfile]: return self._distfiles.values() def get_target_packages(self) -> Dict[Tuple[str, str], TargetPackage]: + random = self._io_d.random + # Create host packages that haven't been instantiated yet. # This implicitly checks for dependency loops. def get_host_package(name: str) -> HostPackage: @@ -179,7 +181,6 @@ def get_host_package(name: str) -> HostPackage: return self._host_packages[name] while self._deferred_host_packages: - random = self._io_d.random get_host_package( random.sample( self._deferred_host_packages.keys(), From 75c2b841de326f4b31701b4e699eef732e31ba29 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Tue, 20 Sep 2016 23:29:14 -0500 Subject: [PATCH 27/28] declare packages/** as design-time test data add __init__.py for use with pkgutil.get_data perhaps a bit of a kludge? 
but a harmless one --- packages/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 packages/__init__.py diff --git a/packages/__init__.py b/packages/__init__.py new file mode 100644 index 0000000..e69de29 From 0ba583598048fb0bb241f2d6fde64c1068cffd74 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Tue, 20 Sep 2016 23:31:50 -0500 Subject: [PATCH 28/28] factor mock_io out of test_suite and move test_suite into `src` dir --- src/mock_io.py | 138 ++++++++++++++++++++++++++++++++++++++++++++++ src/test_suite.py | 71 ++++++++++++++++++++++++ test_suite.py | 121 ---------------------------------------- 3 files changed, 209 insertions(+), 121 deletions(-) create mode 100644 src/mock_io.py create mode 100644 src/test_suite.py delete mode 100644 test_suite.py diff --git a/src/mock_io.py b/src/mock_io.py new file mode 100644 index 0000000..8d65d77 --- /dev/null +++ b/src/mock_io.py @@ -0,0 +1,138 @@ +from io import BytesIO, StringIO +from pathlib import PurePosixPath +from typing import Any, AnyStr, BinaryIO, Dict, Iterator, List, Optional, Tuple +import logging + +from .distfile import Distfile, RandomT +from .builder import Access as BuildAccess +from . import util + +log = logging.getLogger(__name__) + + +def mock_random() -> RandomT: + from random import Random + return Random(1) + + +class _Writing(BytesIO): + def __init__(self, finalize): + BytesIO.__init__(self) + self._finalize = finalize + + def __del__(self): + self._finalize(self.getvalue()) + + +class MockWorld(object): + class Path(util.PathExt): + State = Dict[PurePosixPath, Optional[bytes]] + _flavour = None # type: object + + # Override Path.__new__() + def __new__(cls, _s, _p): + self = object.__new__(cls) + return self + + def __init__(self, state: State, ppp: PurePosixPath) -> None: + self._ppp = ppp + self._state = state + + def __repr__(self): + return 'MockPath(%s)' % self._ppp + + def __str__(self): + return str(self._ppp) + + @property + def name(self): + return self._ppp.name + + def joinpath(self, *other): + return MockWorld.Path(self._state, self._ppp.joinpath(*other)) + + def __truediv__(self, *other): + return self.joinpath(*other) + + def exists(self): + log.debug('%s exists()?', self) + return self._ppp in self._state + + def stat(self): + raise NotImplementedError + + def lstat(self): + raise NotImplementedError + + def is_symlink(self): + return False # TODO: mock symlinks + + def is_file(self): + out = isinstance(self._state.get(self._ppp, 0), bytes) + log.debug('%s is_file()? %s', self, out) + return out + + def open(self, mode='r'): + if self.is_dir() or not self.exists(): + raise IOError + + if 'w' in mode: + if 'b' not in mode: + raise NotImplementedError(mode) + def done(content): + self._state[self._ppp] = content + return _Writing(done) + else: + content = self._state[self._ppp] + return (BytesIO(content) if 'b' in mode + else StringIO(str(content, encoding='utf-8'))) + + def is_dir(self): + v = self._state.get(self._ppp, 0) + log.debug('%s is_dir()? 
%.10s is None?', self, v) + return v is None + + def iterdir(self): + for k in self._state: + if self._ppp / k.name == k: + yield self / k.name + + def __init__(self, fs_data: Dict[str, Any]) -> None: + def unwrap(parent, data) -> Iterator[Tuple[PurePosixPath, Optional[bytes]]]: + for k, v in data.items(): + child = parent / k + if isinstance(v, bytes): + yield child, v + else: + yield child, None + if isinstance(data, dict): + yield from unwrap(child, v) + proot = PurePosixPath('/') + init_fs = dict(unwrap(proot, fs_data)) + root = MockWorld.Path(init_fs, proot) + self._cwd = root / 'cloudabi-ports' + self._environ = {} # type: Dict[str, str] + + def io_d(self) -> Distfile.Access: + return Distfile.Access(self.urlopen, self.subprocess, mock_random()) + + def io_b(self) -> BuildAccess: + return BuildAccess(self.subprocess, self.chdir, self.getenv) + + def cwd(self) -> util.PathExt: + return self._cwd + + def getenv(self, key: str) -> str: + return self._environ[key] + + def chdir(self, path: util.PathExt) -> None: + self._cwd = path + + def urlopen(self) -> BinaryIO: + raise NotImplementedError + + + class RC(util.RunCommand): + pass + + subprocess = RC() diff --git a/src/test_suite.py b/src/test_suite.py new file mode 100644 index 0000000..1398281 --- /dev/null +++ b/src/test_suite.py @@ -0,0 +1,71 @@ +from pkgutil import get_data +import logging + +from . import config +from . import util +from .repository import Repository +from .mock_io import MockWorld + +logging.basicConfig(level=logging.DEBUG) +log = logging.getLogger(__name__) + + +def test_walk(): + build_contents = lambda pkg_name: get_data('packages', pkg_name + '/BUILD') + files = { + 'cloudabi-ports': { + 'packages': { + 'c-runtime': { + 'BUILD': build_contents('c-runtime') + } + } + } + } + io = MockWorld(files) + actual = [(1, len(d), len(f)) + for (_, d, f) + in list(util.walk(io.cwd() / 'packages'))] + assert actual == [(1, 1, 0), (1, 0, 1)] + + +def test_get_target_packages_none() -> None: + io = MockWorld(dict()) + + target_packages = get_target_packages(io.cwd(), io.io_d(), io.io_b()) + + assert len(target_packages) == len(config.ARCHITECTURES) + + an_arch = next(iter(config.ARCHITECTURES)) + assert ('everything', an_arch) in target_packages + assert str(target_packages[('everything', an_arch)]) == (an_arch + '-everything 1.0') + + +def test_get_target_packages_one() -> None: + build_contents = lambda pkg_name: get_data('packages', pkg_name + '/BUILD') + files = { + 'cloudabi-ports': { + 'packages': { + 'c-runtime': { + 'BUILD': build_contents('c-runtime') + } + } + } + } + io = MockWorld(files) + + target_packages = get_target_packages(io.cwd(), io.io_d(), io.io_b()) + + assert len(target_packages) == 2 * len(config.ARCHITECTURES) + an_arch = next(iter(config.ARCHITECTURES)) + assert ('c-runtime', an_arch) in target_packages + + +def get_target_packages(cwd, io_d, io_b): # TODO: move this to build_packages.py + repo = Repository(cwd / '_obj/install', io_d, io_b) + + for file in util.walk_files(cwd / 'packages'): + log.debug('walk file: %s .name = %s', file, file.name) + if file.name == 'BUILD': + log.debug('adding: %s', file) + repo.add_build_file(file, cwd / '_obj/distfiles') + return repo.get_target_packages() diff --git a/test_suite.py b/test_suite.py deleted file mode 100644 index ae068cf..0000000 --- a/test_suite.py +++ /dev/null @@ -1,121 +0,0 @@ -from io import BytesIO, StringIO -from pathlib import PurePosixPath -from typing import AnyStr, BinaryIO, Dict, List, Optional -import logging - -from src import config 
-from src import util -from src.builder import Access as BuildAccess -from src.distfile import Distfile, RandomT -from src.repository import Repository - -logging.basicConfig(level=logging.INFO) -log = logging.getLogger(__name__) - - -def test_get_target_packages() -> None: - io = MockWorld(dict()) - - target_packages = get_target_packages(io.cwd(), io.io_d(), io.io_b()) - - assert len(target_packages) == len(config.ARCHITECTURES) - - an_arch = next(iter(config.ARCHITECTURES)) - assert ('everything', an_arch) in target_packages - assert str(target_packages[('everything', an_arch)]) == (an_arch + '-everything 1.0') - - -def get_target_packages(cwd, io_d, io_b): # TODO: move this to build_packages.py - repo = Repository(cwd / '_obj/install', io_d, io_b) - - for file in util.walk_files(cwd / 'packages'): - if file.name == 'BUILD': - repo.add_build_file(file, cwd / '_obj/distfiles') - return repo.get_target_packages() - - -def mock_random() -> RandomT: - from random import Random - return Random(1) - - -class _Writing(BytesIO): - def __init__(self, finalize): - BinaryIO.__init__(self) - self._finalize = finalize - - def __del__(self): - self._finalize(self.getvalue()) - - -class MockWorld(object): - class Path(util.PathExt): - State = Dict[PurePosixPath, Optional[bytes]] - - # Override Path.__new__() - def __new__(cls, _s, _p): - self = object.__new__(cls) - return self - - def __init__(self, state: State, ppp: PurePosixPath) -> None: - self._ppp = ppp - self._state = state - - def open(self, mode='r'): - if 'w' in mode: - if 'b' not in mode: - raise NotImplementedError(mode) - def done(content): - self._state[self._ppp] = content - return _Writing(done) - else: - try: - content = self._state[self._parts] - except KeyError: - raise IOError(self._parts) - return (BytesIO(content) if 'b' in mode - else StringIO(content)) - - def joinpath(self, *other): - return MockWorld.Path(self._state, self._ppp.joinpath(*other)) - - def __truediv__(self, *other): - return self.joinpath(*other) - - def is_dir(self): - return self._state.get(self._ppp, 0) is None - - def exists(self): - return self._ppp in self._state - - def __init__(self, init_fs: Path.State) -> None: - root = MockWorld.Path(init_fs, PurePosixPath('/')) - self._cwd = root / 'tmp' - self._environ = {} # type: Dict[str, str] - - def io_d(self) -> Distfile.Access: - return (self.urlopen, self.subprocess, mock_random()) - - def io_b(self) -> BuildAccess: - return (self.subprocess, self.chdir, self.getenv) - - def cwd(self) -> util.PathExt: - return self._cwd - - def getenv(self, key: str) -> str: - return self._environ[key] - - def chdir(self, path: util.PathExt) -> None: - self._cwd = path - - def urlopen(self) -> BinaryIO: - raise NotImplementedError - - - class RC(util.RunCommand): - pass - - subprocess = RC() - - -test_get_target_packages()
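
Note on the mock filesystem introduced in src/mock_io.py (patch 28): MockWorld takes a
nested dict in which a dict value stands for a directory and a bytes value stands for a
regular file's contents, as exercised by test_walk() in src/test_suite.py. A minimal
sketch of constructing one directly follows; the 'hello' package name and the BUILD
contents below are illustrative only and not part of these patches:

    from src.mock_io import MockWorld

    files = {
        'cloudabi-ports': {            # MockWorld uses this directory as cwd()
            'packages': {
                'hello': {
                    # bytes value -> regular file
                    'BUILD': b'# illustrative BUILD contents\n',
                },                     # dict value -> directory
            },
        },
    }
    io = MockWorld(files)
    build = io.cwd() / 'packages' / 'hello' / 'BUILD'
    assert build.is_file()
    assert (io.cwd() / 'packages' / 'hello').is_dir()

As in src/test_suite.py, io.io_d() and io.io_b() then supply the Distfile and builder
access objects, so Repository can be driven entirely in memory without touching the
real filesystem.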