Some local improvements to flatpak-pip-generator

Christoph Stahl 2024-09-30 15:49:08 +02:00
parent dd84ff361b
commit ea6a5d40ad


@@ -1,6 +1,6 @@
 #!/usr/bin/env python3

-__license__ = 'MIT'
+__license__ = "MIT"

 import argparse
 import json
@@ -22,42 +22,57 @@ except ImportError:
     exit('Requirements modules is not installed. Run "pip install requirements-parser"')

 parser = argparse.ArgumentParser()
-parser.add_argument('packages', nargs='*')
-parser.add_argument('--python2', action='store_true',
-                    help='Look for a Python 2 package')
-parser.add_argument('--cleanup', choices=['scripts', 'all'],
-                    help='Select what to clean up after build')
-parser.add_argument('--requirements-file', '-r',
-                    help='Specify requirements.txt file')
-parser.add_argument('--build-only', action='store_const',
-                    dest='cleanup', const='all',
-                    help='Clean up all files after build')
-parser.add_argument('--build-isolation', action='store_true',
-                    default=False,
-                    help=(
-                        'Do not disable build isolation. '
-                        'Mostly useful on pip that does\'t '
-                        'support the feature.'
-                    ))
-parser.add_argument('--ignore-installed',
-                    type=lambda s: s.split(','),
-                    default='',
-                    help='Comma-separated list of package names for which pip '
-                         'should ignore already installed packages. Useful when '
-                         'the package is installed in the SDK but not in the '
-                         'runtime.')
-parser.add_argument('--checker-data', action='store_true',
-                    help='Include x-checker-data in output for the "Flatpak External Data Checker"')
-parser.add_argument('--output', '-o',
-                    help='Specify output file name')
-parser.add_argument('--runtime',
-                    help='Specify a flatpak to run pip inside of a sandbox, ensures python version compatibility')
-parser.add_argument('--yaml', action='store_true',
-                    help='Use YAML as output format instead of JSON')
-parser.add_argument('--ignore-errors', action='store_true',
-                    help='Ignore errors when downloading packages')
-parser.add_argument('--ignore-pkg', nargs='*',
-                    help='Ignore a package when generating the manifest. Can only be used with a requirements file')
+parser.add_argument("packages", nargs="*")
+parser.add_argument("--python2", action="store_true", help="Look for a Python 2 package")
+parser.add_argument(
+    "--cleanup", choices=["scripts", "all"], help="Select what to clean up after build"
+)
+parser.add_argument("--requirements-file", "-r", help="Specify requirements.txt file")
+parser.add_argument(
+    "--build-only",
+    action="store_const",
+    dest="cleanup",
+    const="all",
+    help="Clean up all files after build",
+)
+parser.add_argument(
+    "--build-isolation",
+    action="store_true",
+    default=False,
+    help=(
+        "Do not disable build isolation. "
+        "Mostly useful on pip that does't "
+        "support the feature."
+    ),
+)
+parser.add_argument(
+    "--ignore-installed",
+    type=lambda s: s.split(","),
+    default="",
+    help="Comma-separated list of package names for which pip "
+    "should ignore already installed packages. Useful when "
+    "the package is installed in the SDK but not in the "
+    "runtime.",
+)
+parser.add_argument(
+    "--checker-data",
+    action="store_true",
+    help='Include x-checker-data in output for the "Flatpak External Data Checker"',
+)
+parser.add_argument("--output", "-o", help="Specify output file name")
+parser.add_argument(
+    "--runtime",
+    help="Specify a flatpak to run pip inside of a sandbox, ensures python version compatibility",
+)
+parser.add_argument("--yaml", action="store_true", help="Use YAML as output format instead of JSON")
+parser.add_argument(
+    "--ignore-errors", action="store_true", help="Ignore errors when downloading packages"
+)
+parser.add_argument(
+    "--ignore-pkg",
+    nargs="*",
+    help="Ignore a package when generating the manifest. Can only be used with a requirements file",
+)

 opts = parser.parse_args()

 if opts.yaml:
@@ -68,64 +83,64 @@ if opts.yaml:


 def get_pypi_url(name: str, filename: str) -> str:
-    url = 'https://pypi.org/pypi/{}/json'.format(name)
-    print('Extracting download url for', name)
+    url = "https://pypi.org/pypi/{}/json".format(name)
+    print("Extracting download url for", name)
     with urllib.request.urlopen(url) as response:
-        body = json.loads(response.read().decode('utf-8'))
-        for release in body['releases'].values():
+        body = json.loads(response.read().decode("utf-8"))
+        for release in body["releases"].values():
             for source in release:
-                if source['filename'] == filename:
-                    return source['url']
-        raise Exception('Failed to extract url from {}'.format(url))
+                if source["filename"] == filename:
+                    return source["url"]
+        raise Exception("Failed to extract url from {}".format(url))


 def get_tar_package_url_pypi(name: str, version: str) -> str:
-    url = 'https://pypi.org/pypi/{}/{}/json'.format(name, version)
+    url = "https://pypi.org/pypi/{}/{}/json".format(name, version)
     with urllib.request.urlopen(url) as response:
-        body = json.loads(response.read().decode('utf-8'))
-        for ext in ['bz2', 'gz', 'xz', 'zip']:
-            for source in body['urls']:
-                if source['url'].endswith(ext):
-                    return source['url']
-        err = 'Failed to get {}-{} source from {}'.format(name, version, url)
+        body = json.loads(response.read().decode("utf-8"))
+        for ext in ["bz2", "gz", "xz", "zip"]:
+            for source in body["urls"]:
+                if source["url"].endswith(ext):
+                    return source["url"]
+        err = "Failed to get {}-{} source from {}".format(name, version, url)
         raise Exception(err)


 def get_package_name(filename: str) -> str:
-    if filename.endswith(('bz2', 'gz', 'xz', 'zip')):
-        segments = filename.split('-')
+    if filename.endswith(("bz2", "gz", "xz", "zip")):
+        segments = filename.split("-")
         if len(segments) == 2:
             return segments[0]
-        return '-'.join(segments[:len(segments) - 1])
-    elif filename.endswith('whl'):
-        segments = filename.split('-')
+        return "-".join(segments[: len(segments) - 1])
+    elif filename.endswith("whl"):
+        segments = filename.split("-")
         if len(segments) == 5:
             return segments[0]
         candidate = segments[: len(segments) - 4]
         # Some packages list the version number twice
         # e.g. PyQt5-5.15.0-5.15.0-cp35.cp36.cp37.cp38-abi3-manylinux2014_x86_64.whl
         if candidate[-1] == segments[len(segments) - 4]:
-            return '-'.join(candidate[:-1])
-        return '-'.join(candidate)
+            return "-".join(candidate[:-1])
+        return "-".join(candidate)
     else:
         raise Exception(
-            'Downloaded filename: {} does not end with bz2, gz, xz, zip, or whl'.format(filename)
+            "Downloaded filename: {} does not end with bz2, gz, xz, zip, or whl".format(filename)
         )


 def get_file_version(filename: str) -> str:
     name = get_package_name(filename)
-    segments = filename.split(name + '-')
-    version = segments[1].split('-')[0]
-    for ext in ['tar.gz', 'whl', 'tar.xz', 'tar.gz', 'tar.bz2', 'zip']:
-        version = version.replace('.' + ext, '')
+    segments = filename.split(name + "-")
+    version = segments[1].split("-")[0]
+    for ext in ["tar.gz", "whl", "tar.xz", "tar.gz", "tar.bz2", "zip"]:
+        version = version.replace("." + ext, "")
     return version


 def get_file_hash(filename: str) -> str:
     sha = hashlib.sha256()
-    print('Generating hash for', filename.split('/')[-1])
-    with open(filename, 'rb') as f:
+    print("Generating hash for", filename.split("/")[-1])
+    with open(filename, "rb") as f:
         while True:
             data = f.read(1024 * 1024 * 32)
             if not data:
@@ -136,24 +151,24 @@ def get_file_hash(filename: str) -> str:

 def download_tar_pypi(url: str, tempdir: str) -> None:
     with urllib.request.urlopen(url) as response:
-        file_path = os.path.join(tempdir, url.split('/')[-1])
-        with open(file_path, 'x+b') as tar_file:
+        file_path = os.path.join(tempdir, url.split("/")[-1])
+        with open(file_path, "x+b") as tar_file:
             shutil.copyfileobj(response, tar_file)


 def parse_continuation_lines(fin):
     for line in fin:
-        line = line.rstrip('\n')
-        while line.endswith('\\'):
+        line = line.rstrip("\n")
+        while line.endswith("\\"):
             try:
-                line = line[:-1] + next(fin).rstrip('\n')
+                line = line[:-1] + next(fin).rstrip("\n")
             except StopIteration:
                 exit('Requirements have a wrong number of line continuation characters "\\"')
         yield line


 def fprint(string: str) -> None:
-    separator = '=' * 72  # Same as `flatpak-builder`
+    separator = "=" * 72  # Same as `flatpak-builder`
     print(separator)
     print(string)
     print(separator)
@@ -163,18 +178,20 @@ packages = []
 if opts.requirements_file:
     requirements_file_input = os.path.expanduser(opts.requirements_file)
     try:
-        with open(requirements_file_input, 'r') as req_file:
+        with open(requirements_file_input, "r") as req_file:
             reqs = parse_continuation_lines(req_file)
-            reqs_as_str = '\n'.join([r.split('--hash')[0] for r in reqs])
+            reqs_as_str = "\n".join([r.split("--hash")[0] for r in reqs])
             reqs_list_raw = reqs_as_str.splitlines()
-            py_version_regex = re.compile(r';.*python_version .+$')  # Remove when pip-generator can handle python_version
-            reqs_list = [py_version_regex.sub('', p) for p in reqs_list_raw]
+            py_version_regex = re.compile(
+                r";.*python_version .+$"
+            )  # Remove when pip-generator can handle python_version
+            reqs_list = [py_version_regex.sub("", p) for p in reqs_list_raw]
             if opts.ignore_pkg:
-                reqs_new = '\n'.join(i for i in reqs_list if i not in opts.ignore_pkg)
+                reqs_new = "\n".join(i for i in reqs_list if i.strip() not in opts.ignore_pkg)
             else:
                 reqs_new = reqs_as_str
             packages = list(requirements.parse(reqs_new))
-        with tempfile.NamedTemporaryFile('w', delete=False, prefix='requirements.') as req_file:
+        with tempfile.NamedTemporaryFile("w", delete=False, prefix="requirements.") as req_file:
             req_file.write(reqs_new)
             requirements_file_output = req_file.name
     except FileNotFoundError as err:
@@ -182,100 +199,103 @@ if opts.requirements_file:
         sys.exit(1)
 elif opts.packages:
-    packages = list(requirements.parse('\n'.join(opts.packages)))
-    with tempfile.NamedTemporaryFile('w', delete=False, prefix='requirements.') as req_file:
-        req_file.write('\n'.join(opts.packages))
+    packages = list(requirements.parse("\n".join(opts.packages)))
+    with tempfile.NamedTemporaryFile("w", delete=False, prefix="requirements.") as req_file:
+        req_file.write("\n".join(opts.packages))
         requirements_file_output = req_file.name
 else:
     if not len(sys.argv) > 1:
-        exit('Please specifiy either packages or requirements file argument')
+        exit("Please specifiy either packages or requirements file argument")
     else:
-        exit('This option can only be used with requirements file')
+        exit("This option can only be used with requirements file")

 for i in packages:
     if i["name"].lower().startswith("pyqt"):
         print("PyQt packages are not supported by flapak-pip-generator")
         print("However, there is a BaseApp for PyQt available, that you should use")
-        print("Visit https://github.com/flathub/com.riverbankcomputing.PyQt.BaseApp for more information")
+        print(
+            "Visit https://github.com/flathub/com.riverbankcomputing.PyQt.BaseApp for more information"
+        )
         sys.exit(0)

-with open(requirements_file_output, 'r') as req_file:
-    use_hash = '--hash=' in req_file.read()
+with open(requirements_file_output, "r") as req_file:
+    use_hash = "--hash=" in req_file.read()

-python_version = '2' if opts.python2 else '3'
+python_version = "2" if opts.python2 else "3"

 if opts.python2:
-    pip_executable = 'pip2'
+    pip_executable = "pip2"
 else:
-    pip_executable = 'pip3'
+    pip_executable = "pip3"

 if opts.runtime:
     flatpak_cmd = [
-        'flatpak',
-        '--devel',
-        '--share=network',
-        '--filesystem=/tmp',
-        '--command={}'.format(pip_executable),
-        'run',
-        opts.runtime
+        "flatpak",
+        "--devel",
+        "--share=network",
+        "--filesystem=/tmp",
+        "--command={}".format(pip_executable),
+        "run",
+        opts.runtime,
     ]
     if opts.requirements_file:
         if os.path.exists(requirements_file_output):
             prefix = os.path.realpath(requirements_file_output)
-            flag = '--filesystem={}'.format(prefix)
+            flag = "--filesystem={}".format(prefix)
             flatpak_cmd.insert(1, flag)
 else:
     flatpak_cmd = [pip_executable]

-output_path = ''
+output_path = ""

 if opts.output:
     output_path = os.path.dirname(opts.output)
     output_package = os.path.basename(opts.output)
 elif opts.requirements_file:
-    output_package = 'python{}-{}'.format(
+    output_package = "python{}-{}".format(
         python_version,
-        os.path.basename(opts.requirements_file).replace('.txt', ''),
+        os.path.basename(opts.requirements_file).replace(".txt", ""),
     )
 elif len(packages) == 1:
-    output_package = 'python{}-{}'.format(
-        python_version, packages[0].name,
+    output_package = "python{}-{}".format(
+        python_version,
+        packages[0].name,
     )
 else:
-    output_package = 'python{}-modules'.format(python_version)
+    output_package = "python{}-modules".format(python_version)

 if opts.yaml:
-    output_filename = os.path.join(output_path, output_package) + '.yaml'
+    output_filename = os.path.join(output_path, output_package) + ".yaml"
 else:
-    output_filename = os.path.join(output_path, output_package) + '.json'
+    output_filename = os.path.join(output_path, output_package) + ".json"

 modules = []
 vcs_modules = []
 sources = {}

-tempdir_prefix = 'pip-generator-{}'.format(output_package)
+tempdir_prefix = "pip-generator-{}".format(output_package)
 with tempfile.TemporaryDirectory(prefix=tempdir_prefix) as tempdir:
     pip_download = flatpak_cmd + [
-        'download',
-        '--exists-action=i',
-        '--dest',
+        "download",
+        "--exists-action=i",
+        "--dest",
         tempdir,
-        '-r',
-        requirements_file_output
+        "-r",
+        requirements_file_output,
     ]
     if use_hash:
-        pip_download.append('--require-hashes')
+        pip_download.append("--require-hashes")

-    fprint('Downloading sources')
-    cmd = ' '.join(pip_download)
+    fprint("Downloading sources")
+    cmd = " ".join(pip_download)
     print('Running: "{}"'.format(cmd))
     try:
         subprocess.run(pip_download, check=True)
         os.remove(requirements_file_output)
     except subprocess.CalledProcessError:
         os.remove(requirements_file_output)
-        print('Failed to download')
-        print('Please fix the module manually in the generated file')
+        print("Failed to download")
+        print("Please fix the module manually in the generated file")

         if not opts.ignore_errors:
-            print('Ignore the error by passing --ignore-errors')
+            print("Ignore the error by passing --ignore-errors")
             raise

     try:
@@ -283,18 +303,18 @@ with tempfile.TemporaryDirectory(prefix=tempdir_prefix) as tempdir:
     except FileNotFoundError:
         pass

-    fprint('Downloading arch independent packages')
+    fprint("Downloading arch independent packages")
     for filename in os.listdir(tempdir):
-        if not filename.endswith(('bz2', 'any.whl', 'gz', 'xz', 'zip')):
+        if not filename.endswith(("bz2", "any.whl", "gz", "xz", "zip")):
             version = get_file_version(filename)
             name = get_package_name(filename)
             url = get_tar_package_url_pypi(name, version)
-            print('Deleting', filename)
+            print("Deleting", filename)
             try:
                 os.remove(os.path.join(tempdir, filename))
             except FileNotFoundError:
                 pass
-            print('Downloading {}'.format(url))
+            print("Downloading {}".format(url))
             download_tar_pypi(url, tempdir)

     files = {get_package_name(f): [] for f in os.listdir(tempdir)}
@@ -308,99 +328,111 @@ with tempfile.TemporaryDirectory(prefix=tempdir_prefix) as tempdir:
         if len(files[name]) > 1:
             zip_source = False
             for f in files[name]:
-                if f.endswith('.zip'):
+                if f.endswith(".zip"):
                     zip_source = True
             if zip_source:
                 for f in files[name]:
-                    if not f.endswith('.zip'):
+                    if not f.endswith(".zip"):
                         try:
                             os.remove(os.path.join(tempdir, f))
                         except FileNotFoundError:
                             pass

     vcs_packages = {
-        x.name: {'vcs': x.vcs, 'revision': x.revision, 'uri': x.uri}
-        for x in packages
-        if x.vcs
+        x.name: {"vcs": x.vcs, "revision": x.revision, "uri": x.uri} for x in packages if x.vcs
     }

-    fprint('Obtaining hashes and urls')
+    fprint("Obtaining hashes and urls")
     for filename in os.listdir(tempdir):
         name = get_package_name(filename)
         sha256 = get_file_hash(os.path.join(tempdir, filename))

         if name in vcs_packages:
-            uri = vcs_packages[name]['uri']
-            revision = vcs_packages[name]['revision']
-            vcs = vcs_packages[name]['vcs']
-            url = 'https://' + uri.split('://', 1)[1]
-            s = 'commit'
-            if vcs == 'svn':
-                s = 'revision'
-            source = OrderedDict([
-                ('type', vcs),
-                ('url', url),
-                (s, revision),
-            ])
+            uri = vcs_packages[name]["uri"]
+            revision = vcs_packages[name]["revision"]
+            vcs = vcs_packages[name]["vcs"]
+            url = "https://" + uri.split("://", 1)[1]
+            s = "commit"
+            if vcs == "svn":
+                s = "revision"
+            source = OrderedDict(
+                [
+                    ("type", vcs),
+                    ("url", url),
+                    (s, revision),
+                ]
+            )
             is_vcs = True
         else:
             url = get_pypi_url(name, filename)
-            source = OrderedDict([
-                ('type', 'file'),
-                ('url', url),
-                ('sha256', sha256)])
+            source = OrderedDict([("type", "file"), ("url", url), ("sha256", sha256)])
             if opts.checker_data:
-                source['x-checker-data'] = {
-                    'type': 'pypi',
-                    'name': name}
+                source["x-checker-data"] = {"type": "pypi", "name": name}
                 if url.endswith(".whl"):
-                    source['x-checker-data']['packagetype'] = 'bdist_wheel'
+                    source["x-checker-data"]["packagetype"] = "bdist_wheel"
             is_vcs = False
-        sources[name] = {'source': source, 'vcs': is_vcs}
+        sources[name] = {"source": source, "vcs": is_vcs}

 # Python3 packages that come as part of org.freedesktop.Sdk.
-system_packages = ['cython', 'easy_install', 'mako', 'markdown', 'meson', 'pip', 'pygments', 'setuptools', 'six', 'wheel']
+system_packages = [
+    "cython",
+    "easy_install",
+    "mako",
+    "markdown",
+    "meson",
+    "pip",
+    "pygments",
+    "setuptools",
+    "six",
+    "wheel",
+]

-fprint('Generating dependencies')
+fprint("Generating dependencies")
 for package in packages:
     if package.name is None:
-        print('Warning: skipping invalid requirement specification {} because it is missing a name'.format(package.line), file=sys.stderr)
-        print('Append #egg=<pkgname> to the end of the requirement line to fix', file=sys.stderr)
+        print(
+            "Warning: skipping invalid requirement specification {} because it is missing a name".format(
+                package.line
+            ),
+            file=sys.stderr,
+        )
+        print("Append #egg=<pkgname> to the end of the requirement line to fix", file=sys.stderr)
         continue
     elif package.name.casefold() in system_packages:
         print(f"{package.name} is in system_packages. Skipping.")
         continue

     if len(package.extras) > 0:
-        extras = '[' + ','.join(extra for extra in package.extras) + ']'
+        extras = "[" + ",".join(extra for extra in package.extras) + "]"
     else:
-        extras = ''
+        extras = ""

     version_list = [x[0] + x[1] for x in package.specs]
-    version = ','.join(version_list)
+    version = ",".join(version_list)

     if package.vcs:
-        revision = ''
+        revision = ""
         if package.revision:
-            revision = '@' + package.revision
-        pkg = package.uri + revision + '#egg=' + package.name
+            revision = "@" + package.revision
+        pkg = package.uri + revision + "#egg=" + package.name
     else:
         pkg = package.name + extras + version

     dependencies = []
     # Downloads the package again to list dependencies
-    tempdir_prefix = 'pip-generator-{}'.format(package.name)
-    with tempfile.TemporaryDirectory(prefix='{}-{}'.format(tempdir_prefix, package.name)) as tempdir:
+    tempdir_prefix = "pip-generator-{}".format(package.name)
+    with tempfile.TemporaryDirectory(
+        prefix="{}-{}".format(tempdir_prefix, package.name)
+    ) as tempdir:
         pip_download = flatpak_cmd + [
-            'download',
-            '--exists-action=i',
-            '--dest',
+            "download",
+            "--exists-action=i",
+            "--dest",
             tempdir,
         ]

         try:
-            print('Generating dependencies for {}'.format(package.name))
+            print("Generating dependencies for {}".format(package.name))
             subprocess.run(pip_download + [pkg], check=True, stdout=subprocess.DEVNULL)
             for filename in sorted(os.listdir(tempdir)):
                 dep_name = get_package_name(filename)
@@ -409,55 +441,57 @@ for package in packages:
                 dependencies.append(dep_name)

         except subprocess.CalledProcessError:
-            print('Failed to download {}'.format(package.name))
+            print("Failed to download {}".format(package.name))

     is_vcs = True if package.vcs else False
     package_sources = []
     for dependency in dependencies:
         if dependency in sources:
             source = sources[dependency]
-        elif dependency.replace('_', '-') in sources:
-            source = sources[dependency.replace('_', '-')]
+        elif dependency.replace("_", "-") in sources:
+            source = sources[dependency.replace("_", "-")]
         else:
             continue

-        if not (not source['vcs'] or is_vcs):
+        if not (not source["vcs"] or is_vcs):
             continue
-        package_sources.append(source['source'])
+        package_sources.append(source["source"])

     if package.vcs:
-        name_for_pip = '.'
+        name_for_pip = "."
     else:
         name_for_pip = pkg

-    module_name = 'python{}-{}'.format(python_version, package.name)
+    module_name = "python{}-{}".format(python_version, package.name)

     pip_command = [
         pip_executable,
-        'install',
-        '--verbose',
-        '--exists-action=i',
-        '--no-index',
+        "install",
+        "--verbose",
+        "--exists-action=i",
+        "--no-index",
         '--find-links="file://${PWD}"',
-        '--prefix=${FLATPAK_DEST}',
-        '"{}"'.format(name_for_pip)
+        "--prefix=${FLATPAK_DEST}",
+        '"{}"'.format(name_for_pip),
     ]
     if package.name in opts.ignore_installed:
-        pip_command.append('--ignore-installed')
+        pip_command.append("--ignore-installed")
     if not opts.build_isolation:
-        pip_command.append('--no-build-isolation')
+        pip_command.append("--no-build-isolation")

-    module = OrderedDict([
-        ('name', module_name),
-        ('buildsystem', 'simple'),
-        ('build-commands', [' '.join(pip_command)]),
-        ('sources', package_sources),
-    ])
+    module = OrderedDict(
+        [
+            ("name", module_name),
+            ("buildsystem", "simple"),
+            ("build-commands", [" ".join(pip_command)]),
+            ("sources", package_sources),
+        ]
+    )
-    if opts.cleanup == 'all':
-        module['cleanup'] = ['*']
-    elif opts.cleanup == 'scripts':
-        module['cleanup'] = ['/bin', '/share/man/man1']
+    if opts.cleanup == "all":
+        module["cleanup"] = ["*"]
+    elif opts.cleanup == "scripts":
+        module["cleanup"] = ["/bin", "/share/man/man1"]

     if package.vcs:
         vcs_modules.append(module)
@@ -469,15 +503,16 @@ if len(modules) == 1:
     pypi_module = modules[0]
 else:
     pypi_module = {
-        'name': output_package,
-        'buildsystem': 'simple',
-        'build-commands': [],
-        'modules': modules,
+        "name": output_package,
+        "buildsystem": "simple",
+        "build-commands": [],
+        "modules": modules,
     }

 print()
-with open(output_filename, 'w') as output:
+with open(output_filename, "w") as output:
     if opts.yaml:
+
         class OrderedDumper(yaml.Dumper):
             def increase_indent(self, flow=False, indentless=False):
                 return super(OrderedDumper, self).increase_indent(flow, False)
@@ -491,4 +526,4 @@ with open(output_filename, 'w') as output:
         yaml.dump(pypi_module, output, Dumper=OrderedDumper)
     else:
         output.write(json.dumps(pypi_module, indent=4))
-print('Output saved to {}'.format(output_filename))
+print("Output saved to {}".format(output_filename))