-
Notifications
You must be signed in to change notification settings - Fork 1.9k
/
Copy pathpythonpackage.py
806 lines (706 loc) · 31 KB
/
pythonpackage.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
""" This module offers highlevel functions to get package metadata
like the METADATA file, the name, or a list of dependencies.
Usage examples:
# Getting package name from pip reference:
from pythonforandroid.pythonpackage import get_package_name
print(get_package_name("pillow"))
# Outputs: "Pillow" (note the spelling!)
# Getting package dependencies:
from pythonforandroid.pythonpackage import get_package_dependencies
print(get_package_dependencies("pep517"))
# Outputs: "['pytoml']"
# Get package name from arbitrary package source:
from pytonforandroid.pythonpackage import get_package_name
print(get_package_name("/some/local/project/folder/"))
# Outputs package name
NOTE:
Yes, this module doesn't fit well into python-for-android, but this
functionality isn't available ANYWHERE ELSE, and upstream (pip, ...)
currently has no interest in taking this over, so it has no other place
to go.
(Unless someone reading this puts it into yet another packaging lib)
Reference discussion/upstream inclusion attempt:
https://github.com/pypa/packaging-problems/issues/247
"""
import functools
import os
import shutil
import subprocess
import sys
import tarfile
import tempfile
import textwrap
import time
import zipfile
from io import open # needed for python 2
from urllib.parse import unquote as urlunquote
from urllib.parse import urlparse
import toml
from pep517.envbuild import BuildEnvironment
from pep517.wrappers import Pep517HookCaller
def transform_dep_for_pip(dependency):
    """ Translate a PEP 508 URL dependency ('name @ url') into a
        pip-installable 'url#egg=name' reference.

        WORKAROUND FOR UPSTREAM BUG:
        https://github.com/pypa/pip/issues/6097
        (Please REMOVE workaround once that is fixed & released upstream!)

        setup_requires() entries can use the PEP 508 URL format, which
        pip will not install from a requirements.txt. Anything that is
        not such a URL reference is returned unchanged.
    """
    at_idx = dependency.find("@")
    scheme_idx = dependency.find("://")
    # Only rewrite when '@' appears before any '://' (or no scheme at all):
    is_pep508_url = at_idx > 0 and (scheme_idx < 0 or at_idx < scheme_idx)
    if not is_pep508_url:
        return dependency
    # Drop a trailing '#' so our own fragment can be appended cleanly:
    if dependency.endswith("#"):
        dependency = dependency[:-1]
    pkg_name = dependency.partition("@")[0].strip()
    url_part = dependency.partition("@")[2].strip().partition("#egg")[0]
    return url_part + "#egg=" + pkg_name
def extract_metainfo_files_from_package(
        package,
        output_folder,
        debug=False
        ):
    """ Extracts metadata files from the given package to the given folder,
        which may be referenced in any way that is permitted in
        a requirements.txt file or install_requires=[] listing.

        Current supported metadata files that will be extracted:

        - pytoml.yml  (only if package wasn't obtained as wheel)
        - METADATA

        :param package: pip-style package reference (name, path, URL, ...)
        :param output_folder: existing folder to place the files in
        :param debug: if True, print diagnostic output on failure
        :raises ValueError: on bad arguments or when metadata extraction
            for the package fails
    """
    if package is None:
        raise ValueError("package cannot be None")
    if not os.path.exists(output_folder) or os.path.isfile(output_folder):
        raise ValueError("output folder needs to be existing folder")
    if debug:
        print("extract_metainfo_files_from_package: extracting for " +
              "package: " + str(package))

    # A temp folder for making a package copy in case it's a local folder,
    # because extracting metadata might modify files
    # (creating sdists/wheels...)
    temp_folder = tempfile.mkdtemp(prefix="pythonpackage-package-copy-")
    try:
        # Package is indeed a folder! Get a temp copy to work on:
        if is_filesystem_path(package):
            shutil.copytree(
                parse_as_folder_reference(package),
                os.path.join(temp_folder, "package"),
                ignore=shutil.ignore_patterns(".tox")
            )
            package = os.path.join(temp_folder, "package")

        # Because PEP517 can be noisy and contextlib.redirect_* fails to
        # contain it, we will run the actual analysis in a separate process:
        # (the inline "-c" program re-imports this very module by file path
        # and calls the unsafe worker with argv[1]=package,
        # argv[2]=output_folder)
        try:
            subprocess.check_output([
                sys.executable,
                "-c",
                "import importlib\n"
                "import json\n"
                "import os\n"
                "import sys\n"
                "sys.path = [os.path.dirname(sys.argv[3])] + sys.path\n"
                "m = importlib.import_module(\n"
                "    os.path.basename(sys.argv[3]).partition('.')[0]\n"
                ")\n"
                "m._extract_metainfo_files_from_package_unsafe("
                "    sys.argv[1],"
                "    sys.argv[2],"
                ")",
                package, output_folder, os.path.abspath(__file__)],
                stderr=subprocess.STDOUT,  # make sure stderr is muted.
                cwd=os.path.join(os.path.dirname(__file__), "..")
            )
        except subprocess.CalledProcessError as e:
            output = e.output.decode("utf-8", "replace")
            if debug:
                print("Got error obtaining meta info.")
                print("Detail output:")
                print(output)
                print("End of Detail output.")
            raise ValueError(
                "failed to obtain meta info - "
                "is '{}' a valid package? "
                "Detailed output:\n{}".format(package, output)
                )
    finally:
        shutil.rmtree(temp_folder)
def _get_system_python_executable():
    """ Returns the path of the system-wide python binary.
        (In case we're running in a virtualenv or venv)

        :return: path to a usable non-virtualenv python interpreter
        :raises RuntimeError: when no usable system python can be located
    """
    # This function is required by get_package_as_folder() to work
    # inside a virtualenv, since venv creation will fail with
    # the virtualenv's local python binary.
    # (venv/virtualenv incompatibility)

    # Abort if not in virtualenv or venv:
    if not hasattr(sys, "real_prefix") and (
            not hasattr(sys, "base_prefix") or
            os.path.normpath(sys.base_prefix) ==
            os.path.normpath(sys.prefix)):
        return sys.executable

    # Extract prefix we need to look in:
    if hasattr(sys, "real_prefix"):
        search_prefix = sys.real_prefix  # virtualenv
    else:
        search_prefix = sys.base_prefix  # venv

    def python_binary_from_folder(path):
        # Returns the most version-specific usable "python..." binary
        # found directly inside the given folder, or None.

        def binary_is_usable(python_bin):
            """ Helper function to see if a given binary name refers
                to a usable python interpreter binary
            """

            # Abort if path isn't present at all or a directory:
            if not os.path.exists(
                os.path.join(path, python_bin)
            ) or os.path.isdir(os.path.join(path, python_bin)):
                return
            # We should check file not found anyway trying to run it,
            # since it might be a dead symlink:
            try:
                filenotfounderror = FileNotFoundError
            except NameError:  # Python 2
                filenotfounderror = OSError
            try:
                # Run it and see if version output works with no error:
                subprocess.check_output([
                    os.path.join(path, python_bin), "--version"
                ], stderr=subprocess.STDOUT)
                return True
            except (subprocess.CalledProcessError, filenotfounderror):
                return False

        # Start from the most specific name and strip trailing ".xyz"
        # components until something runs (e.g. python3.9 -> python3):
        python_name = "python" + sys.version
        while (not binary_is_usable(python_name) and
               python_name.find(".") > 0):
            # Try less specific binary name:
            python_name = python_name.rpartition(".")[0]
        if binary_is_usable(python_name):
            return os.path.join(path, python_name)
        return None

    # Return from sys.real_prefix if present:
    result = python_binary_from_folder(search_prefix)
    if result is not None:
        return result

    # Check out all paths in $PATH:
    bad_candidates = []
    good_candidates = []
    ever_had_nonvenv_path = False
    ever_had_path_starting_with_prefix = False
    for p in os.environ.get("PATH", "").split(":"):
        # Skip if not possibly the real system python:
        if not os.path.normpath(p).startswith(
                os.path.normpath(search_prefix)
                ):
            continue

        ever_had_path_starting_with_prefix = True

        # First folders might be virtualenv/venv we want to avoid:
        if not ever_had_nonvenv_path:
            sep = os.path.sep
            if (
                    ("system32" not in p.lower() and
                     "usr" not in p and
                     not p.startswith("/opt/python")) or
                    {"home", ".tox"}.intersection(set(p.split(sep))) or
                    "users" in p.lower()
                    ):
                # Doesn't look like bog-standard system path.
                if (p.endswith(os.path.sep + "bin") or
                        p.endswith(os.path.sep + "bin" + os.path.sep)):
                    # Also ends in "bin" -> likely virtualenv/venv.
                    # Add as unfavorable / end of candidates:
                    bad_candidates.append(p)
                    continue
            ever_had_nonvenv_path = True
        good_candidates.append(p)

    # If we have a bad env with PATH not containing any reference to our
    # real python (travis, why would you do that to me?) then just guess
    # based from the search prefix location itself:
    if not ever_had_path_starting_with_prefix:
        # ... and yes we're scanning all the folders for that, it's dumb
        # but i'm not aware of a better way: (@JonasT)
        for root, dirs, files in os.walk(search_prefix, topdown=True):
            for name in dirs:
                bad_candidates.append(os.path.join(root, name))

    # Sort candidates by length (to prefer shorter ones):
    def candidate_cmp(a, b):
        return len(a) - len(b)
    good_candidates = sorted(
        good_candidates, key=functools.cmp_to_key(candidate_cmp)
    )
    bad_candidates = sorted(
        bad_candidates, key=functools.cmp_to_key(candidate_cmp)
    )

    # See if we can now actually find the system python:
    for p in good_candidates + bad_candidates:
        result = python_binary_from_folder(p)
        if result is not None:
            return result

    # BUGFIX: this error message previously formatted sys.real_prefix,
    # which only exists under virtualenv - under a plain venv the error
    # path itself crashed with AttributeError. Use search_prefix, which
    # was computed above for both cases.
    raise RuntimeError(
        "failed to locate system python in: {}"
        " - checked candidates were: {}, {}"
        .format(search_prefix, good_candidates, bad_candidates)
    )
def get_package_as_folder(dependency):
    """ This function downloads the given package / dependency and extracts
        the raw contents into a folder.

        Afterwards, it returns a tuple with the type of distribution obtained,
        and the temporary folder it extracted to. It is the caller's
        responsibility to delete the returned temp folder after use.

        Examples of returned values:

        ("source", "/tmp/pythonpackage-venv-e84toiwjw")
        ("wheel", "/tmp/pythonpackage-venv-85u78uj")

        What the distribution type will be depends on what pip decides to
        download. Returns (None, None) when pip downloads nothing (e.g. a
        conditional dependency whose condition isn't met here).

        BUGFIX: the tarball branch contained stray leftover lines from an
        earlier ``f.extractall(...)`` call after the path-traversal-safe
        extraction helper was added, which made the module fail to parse.
        The leftovers are removed and the safe extraction call restored.
    """

    venv_parent = tempfile.mkdtemp(
        prefix="pythonpackage-venv-"
    )
    try:
        # Create a venv to install into:
        try:
            if int(sys.version.partition(".")[0]) < 3:
                # Python 2.x has no venv.
                subprocess.check_output([
                    sys.executable,  # no venv conflict possible,
                                     # -> no need to use system python
                    "-m", "virtualenv",
                    "--python=" + _get_system_python_executable(),
                    os.path.join(venv_parent, 'venv')
                ], cwd=venv_parent)
            else:
                # On modern Python 3, use venv.
                subprocess.check_output([
                    _get_system_python_executable(), "-m", "venv",
                    os.path.join(venv_parent, 'venv')
                ], cwd=venv_parent)
        except subprocess.CalledProcessError as e:
            output = e.output.decode('utf-8', 'replace')
            raise ValueError(
                'venv creation unexpectedly ' +
                'failed. error output: ' + str(output)
            )
        venv_path = os.path.join(venv_parent, "venv")

        # Update pip and wheel in venv for latest feature support:
        try:
            filenotfounderror = FileNotFoundError
        except NameError:  # Python 2.
            filenotfounderror = OSError
        try:
            subprocess.check_output([
                os.path.join(venv_path, "bin", "pip"),
                "install", "-U", "pip", "wheel",
            ])
        except filenotfounderror:
            raise RuntimeError(
                "venv appears to be missing pip. "
                "did we fail to use a proper system python??\n"
                "system python path detected: {}\n"
                "os.environ['PATH']: {}".format(
                    _get_system_python_executable(),
                    os.environ.get("PATH", "")
                )
            )

        # Create download subfolder:
        os.mkdir(os.path.join(venv_path, "download"))

        # Write a requirements.txt with our package and download:
        with open(os.path.join(venv_path, "requirements.txt"),
                  "w", encoding="utf-8"
                  ) as f:
            def to_unicode(s):  # Needed for Python 2.
                try:
                    return s.decode("utf-8")
                except AttributeError:
                    return s
            f.write(to_unicode(transform_dep_for_pip(dependency)))
        try:
            subprocess.check_output(
                [
                    os.path.join(venv_path, "bin", "pip"),
                    "download", "--no-deps", "-r", "../requirements.txt",
                    "-d", os.path.join(venv_path, "download")
                ],
                stderr=subprocess.STDOUT,
                cwd=os.path.join(venv_path, "download")
            )
        except subprocess.CalledProcessError as e:
            raise RuntimeError("package download failed: " + str(e.output))

        if len(os.listdir(os.path.join(venv_path, "download"))) == 0:
            # No download. This can happen if the dependency has a condition
            # which prohibits install in our environment.
            # (the "package ; ... conditional ... " type of condition)
            return (None, None)

        # Get the result and make sure it's an extracted directory:
        result_folder_or_file = os.path.join(
            venv_path, "download",
            os.listdir(os.path.join(venv_path, "download"))[0]
        )
        dl_type = "source"
        if not os.path.isdir(result_folder_or_file):
            # Must be an archive.
            if result_folder_or_file.endswith((".zip", ".whl")):
                if result_folder_or_file.endswith(".whl"):
                    dl_type = "wheel"
                with zipfile.ZipFile(result_folder_or_file) as f:
                    f.extractall(os.path.join(venv_path,
                                              "download", "extracted"
                                              ))
                result_folder_or_file = os.path.join(
                    venv_path, "download", "extracted"
                )
            elif result_folder_or_file.find(".tar.") > 0:
                # Probably a tarball.
                with tarfile.open(result_folder_or_file) as f:
                    def is_within_directory(directory, target):
                        # Path-traversal guard (CVE-2007-4559-style):
                        # a member must resolve inside the target folder.
                        abs_directory = os.path.abspath(directory)
                        abs_target = os.path.abspath(target)
                        prefix = os.path.commonprefix(
                            [abs_directory, abs_target]
                        )
                        return prefix == abs_directory

                    def safe_extract(tar, path=".", members=None, *,
                                     numeric_owner=False):
                        for member in tar.getmembers():
                            member_path = os.path.join(path, member.name)
                            if not is_within_directory(path, member_path):
                                raise Exception(
                                    "Attempted Path Traversal in Tar File"
                                )
                        tar.extractall(path, members,
                                       numeric_owner=numeric_owner)

                    safe_extract(f, os.path.join(
                        venv_path, "download", "extracted"
                    ))
                result_folder_or_file = os.path.join(
                    venv_path, "download", "extracted"
                )
            else:
                raise RuntimeError(
                    "unknown archive or download " +
                    "type: " + str(result_folder_or_file)
                )

        # If the result is hidden away in an additional subfolder,
        # descend into it:
        while os.path.isdir(result_folder_or_file) and \
                len(os.listdir(result_folder_or_file)) == 1 and \
                os.path.isdir(os.path.join(
                    result_folder_or_file,
                    os.listdir(result_folder_or_file)[0]
                )):
            result_folder_or_file = os.path.join(
                result_folder_or_file,
                os.listdir(result_folder_or_file)[0]
            )

        # Copy result to new dedicated folder so we can throw away
        # our entire virtualenv nonsense after returning:
        result_path = tempfile.mkdtemp()
        shutil.rmtree(result_path)
        shutil.copytree(result_folder_or_file, result_path)
        return (dl_type, result_path)
    finally:
        shutil.rmtree(venv_parent)
def _extract_metainfo_files_from_package_unsafe(
        package,
        output_path
        ):
    """ Worker that extracts pyproject.toml / METADATA of *package*
        into *output_path*, plus a "metadata_source" marker file.
    """
    # This is the unwrapped function that will
    # 1. make lots of stdout/stderr noise
    # 2. possibly modify files (if the package source is a local folder)
    # Use extract_metainfo_files_from_package_folder instead which avoids
    # these issues.

    clean_up_path = False
    path_type = "source"
    path = parse_as_folder_reference(package)
    if path is None:
        # This is not a path. Download it:
        (path_type, path) = get_package_as_folder(package)
        if path_type is None:
            # Download failed.
            raise ValueError(
                "cannot get info for this package, " +
                "pip says it has no downloads (conditional dependency?)"
            )
        clean_up_path = True

    try:
        build_requires = []
        metadata_path = None
        if path_type != "wheel":
            # We need to process this first to get the metadata.

            # Ensure pyproject.toml is available (pep517 expects it):
            # if missing, synthesize the implicit legacy setuptools one.
            if not os.path.exists(os.path.join(path, "pyproject.toml")):
                with open(os.path.join(path, "pyproject.toml"), "w") as f:
                    f.write(textwrap.dedent(u"""\
                    [build-system]
                    requires = ["setuptools", "wheel"]
                    build-backend = "setuptools.build_meta"
                    """))

            # Copy the pyproject.toml:
            shutil.copyfile(
                os.path.join(path, 'pyproject.toml'),
                os.path.join(output_path, 'pyproject.toml')
            )

            # Get build backend and requirements from pyproject.toml:
            with open(os.path.join(path, 'pyproject.toml')) as f:
                build_sys = toml.load(f)['build-system']
                backend = build_sys["build-backend"]
                build_requires.extend(build_sys["requires"])

            # Get a virtualenv with build requirements and get all metadata:
            env = BuildEnvironment()
            metadata = None
            with env:
                hooks = Pep517HookCaller(path, backend)
                env.pip_install(
                    [transform_dep_for_pip(req) for req in build_requires]
                )
                reqs = hooks.get_requires_for_build_wheel({})
                env.pip_install([transform_dep_for_pip(req) for req in reqs])
                try:
                    metadata = hooks.prepare_metadata_for_build_wheel(path)
                except Exception:  # sadly, pep517 has no good error here
                    pass
            if metadata is not None:
                metadata_path = os.path.join(
                    path, metadata, "METADATA"
                )
        else:
            # This is a wheel, so metadata should be in *.dist-info folder:
            metadata_path = os.path.join(
                path,
                [f for f in os.listdir(path) if f.endswith(".dist-info")][0],
                "METADATA"
            )

        # Store type of metadata source. Can be "wheel", "source" for source
        # distribution, and others get_package_as_folder() may support
        # in the future.
        with open(os.path.join(output_path, "metadata_source"), "w") as f:
            try:
                f.write(path_type)
            except TypeError:  # in python 2 path_type may be str/bytes:
                f.write(path_type.decode("utf-8", "replace"))

        # Copy the metadata file:
        shutil.copyfile(metadata_path, os.path.join(output_path, "METADATA"))
    finally:
        if clean_up_path:
            shutil.rmtree(path)
def is_filesystem_path(dep):
    """ Convenience wrapper around parse_as_folder_reference():
        True when the dependency reference points at a local folder
        path, False when it is remote (PyPI name, URL, ...).
    """
    resolved = parse_as_folder_reference(dep)
    return resolved is not None
def parse_as_folder_reference(dep):
    """ See if a dependency reference refers to a folder path.
        If it does, return the folder path (which parses and
        resolves file:// urls in the process).
        If it doesn't, return None.
    """
    # Special case: pep508 urls of the shape 'pkgname @ <target>',
    # where the '@' must come before any '/' or ':' to really be the
    # name/target separator. Recurse on the target part:
    at_idx = dep.find("@")
    if at_idx > 0:
        slash_idx = dep.find("/")
        colon_idx = dep.find(":")
        at_before_slash = slash_idx < 0 or at_idx < slash_idx
        at_before_colon = colon_idx < 0 or at_idx < colon_idx
        if at_before_slash and at_before_colon:
            # 'pkgname @ https://...' or 'pkgname @ /local/file/path'.
            return parse_as_folder_reference(dep.partition("@")[2].lstrip())

    # Accept: absolute paths, file:// urls, relative paths containing a
    # slash but no url scheme, and the empty/"." current-folder refs:
    looks_like_local_path = (
        dep.startswith(("/", "file://")) or
        (dep.find("/") > 0 and dep.find("://") < 0) or
        dep in ["", "."]
    )
    if not looks_like_local_path:
        return None
    if dep.startswith("file://"):
        # Resolve percent-encoding and strip the scheme:
        return urlunquote(urlparse(dep).path)
    return dep
def _extract_info_from_package(dependency,
                               extract_type=None,
                               debug=False,
                               include_build_requirements=False
                               ):
    """ Internal function to extract metainfo from a package.
        Currently supported info types:

        - name
        - dependencies  (a list of dependencies)

        Works by running extract_metainfo_files_from_package() into a
        temp folder and parsing the resulting METADATA (and, when build
        requirements are requested, pyproject.toml) files.

        :raises ValueError: if the package name cannot be determined
        :raises NotImplementedError: if build requirements are requested
            for a package that was only obtainable as a wheel
    """
    if debug:
        print("_extract_info_from_package called with "
              "extract_type={} include_build_requirements={}".format(
                  extract_type, include_build_requirements,
              ))
    output_folder = tempfile.mkdtemp(prefix="pythonpackage-metafolder-")
    try:
        extract_metainfo_files_from_package(
            dependency, output_folder, debug=debug
        )

        # Extract the type of data source we used to get the metadata:
        with open(os.path.join(output_folder,
                               "metadata_source"), "r") as f:
            metadata_source_type = f.read().strip()

        # Extract main METADATA file:
        with open(os.path.join(output_folder, "METADATA"),
                  "r", encoding="utf-8"
                  ) as f:
            # Get metadata and cut away description (is after 2 linebreaks)
            metadata_entries = f.read().partition("\n\n")[0].splitlines()

        if extract_type == "name":
            name = None
            for meta_entry in metadata_entries:
                if meta_entry.lower().startswith("name:"):
                    # Found it - return the value after "Name:":
                    return meta_entry.partition(":")[2].strip()
            if name is None:
                raise ValueError("failed to obtain package name")
            return name
        elif extract_type == "dependencies":
            # First, make sure we don't attempt to return build requirements
            # for wheels since they usually come without pyproject.toml
            # and we haven't implemented another way to get them:
            if include_build_requirements and \
                    metadata_source_type == "wheel":
                if debug:
                    print("_extract_info_from_package: was called "
                          "with include_build_requirements=True on "
                          "package obtained as wheel, raising error...")
                raise NotImplementedError(
                    "fetching build requirements for "
                    "wheels is not implemented"
                )

            # Get build requirements from pyproject.toml if requested:
            requirements = []
            if os.path.exists(os.path.join(output_folder,
                                           'pyproject.toml')
                              ) and include_build_requirements:
                # Read build system from pyproject.toml file: (PEP518)
                with open(os.path.join(output_folder, 'pyproject.toml')) as f:
                    build_sys = toml.load(f)['build-system']
                    if "requires" in build_sys:
                        requirements += build_sys["requires"]
            elif include_build_requirements:
                # For legacy packages with no pyproject.toml, we have to
                # add setuptools as default build system.
                requirements.append("setuptools")

            # Add requirements from metadata:
            requirements += [
                entry.rpartition("Requires-Dist:")[2].strip()
                for entry in metadata_entries
                if entry.startswith("Requires-Dist")
            ]

            return list(set(requirements))  # remove duplicates
    finally:
        shutil.rmtree(output_folder)
# Maps a dependency reference -> (timestamp, resolved package name):
package_name_cache = dict()


def get_package_name(dependency,
                     use_cache=True):
    """ Resolve a dependency reference to the true (case-corrected)
        package name, caching results for up to 600 seconds.
    """
    def _now():
        # Prefer the monotonic clock; Python 2 only has time.time().
        try:
            return time.monotonic()
        except AttributeError:
            return time.time()  # Python 2.
    cached = package_name_cache.get(dependency)
    if cached is not None and use_cache:
        (stored_at, stored_name) = cached
        if stored_at + 600.0 > _now():
            # Cache entry still fresh:
            return stored_name
    result = _extract_info_from_package(dependency, extract_type="name")
    package_name_cache[dependency] = (_now(), result)
    return result
def get_package_dependencies(package,
                             recursive=False,
                             verbose=False,
                             include_build_requirements=False):
    """ Obtain the dependencies from a package. Please note this
        function is possibly SLOW, especially if you enable
        the recursive mode.

        :param package: pip-style package reference (name, path, URL, ...)
        :param recursive: also collect dependencies of dependencies
        :param verbose: print progress details to stdout
        :param include_build_requirements: also include build-time
            requirements (pyproject.toml / setuptools fallback)
        :return: a set of raw dependency reference strings
    """
    packages_processed = set()  # lowercased names already examined
    package_queue = [package]  # worklist of references still to process
    reqs = set()  # collected raw dependency references (the result)
    reqs_as_names = set()  # their resolved lowercased names, for dedup
    while len(package_queue) > 0:
        # Swap in a fresh queue so appends below go to the next round:
        current_queue = package_queue
        package_queue = []
        for package_dep in current_queue:
            new_reqs = set()
            if verbose:
                print("get_package_dependencies: resolving dependency "
                      f"to package name: {package_dep}")
            package = get_package_name(package_dep)
            if package.lower() in packages_processed:
                continue
            if verbose:
                print("get_package_dependencies: "
                      "processing package: {}".format(package))
                print("get_package_dependencies: "
                      "Packages seen so far: {}".format(
                          packages_processed
                      ))
            packages_processed.add(package.lower())

            # Use our regular folder processing to examine:
            new_reqs = new_reqs.union(_extract_info_from_package(
                package_dep, extract_type="dependencies",
                debug=verbose,
                include_build_requirements=include_build_requirements,
            ))

            # Process new requirements:
            if verbose:
                print('get_package_dependencies: collected '
                      "deps of '{}': {}".format(
                          package_dep, str(new_reqs),
                      ))
            for new_req in new_reqs:
                try:
                    req_name = get_package_name(new_req)
                except ValueError as e:
                    if new_req.find(";") >= 0:
                        # Conditional dep where condition isn't met?
                        # --> ignore it
                        continue
                    if verbose:
                        print("get_package_dependencies: " +
                              "unexpected failure to get name " +
                              "of '" + str(new_req) + "': " +
                              str(e))
                    raise RuntimeError(
                        "failed to get " +
                        "name of dependency: " + str(e)
                    )
                if req_name.lower() in reqs_as_names:
                    continue
                if req_name.lower() not in packages_processed:
                    package_queue.append(new_req)
                reqs.add(new_req)
                reqs_as_names.add(req_name.lower())

        # Bail out here if we're not scanning recursively:
        if not recursive:
            package_queue[:] = []  # wipe queue
            break
    if verbose:
        print("get_package_dependencies: returning result: {}".format(reqs))
    return reqs
def get_dep_names_of_package(
        package,
        keep_version_pins=False,
        recursive=False,
        verbose=False,
        include_build_requirements=False
        ):
    """ Gets the dependencies from the package in the given folder,
        then attempts to deduce the actual package name resulting
        from each dependency line, stripping away everything else.
    """
    # First, obtain the dependencies:
    deps = get_package_dependencies(
        package, recursive=recursive, verbose=verbose,
        include_build_requirements=include_build_requirements,
    )
    if verbose:
        print("get_dep_names_of_package_folder: " +
              "processing dependency list to names: " +
              str(deps))

    # Transform dependencies to their stripped down names:
    # (they can still have version pins/restrictions, conditionals, ...)
    dependency_names = set()
    for raw_dep in deps:
        # If we are supposed to keep exact version pins, extract first:
        pin_suffix = ""
        if keep_version_pins:
            if "(==" in raw_dep and raw_dep.endswith(")"):
                # This is a dependency of the format: 'pkg (==1.0)'
                pin_suffix = "==" + raw_dep.rpartition("==")[2][:-1]
            elif "==" in raw_dep and not raw_dep.endswith(")"):
                # This is a dependency of the format: 'pkg==1.0'
                pin_suffix = "==" + raw_dep.rpartition("==")[2]
        # Now get true (and e.g. case-corrected) dependency name:
        dependency_names.add(get_package_name(raw_dep) + pin_suffix)
    return dependency_names