forked from spack/spack
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathinstaller.py
More file actions
2405 lines (1942 loc) · 91.5 KB
/
installer.py
File metadata and controls
2405 lines (1942 loc) · 91.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""
This module encapsulates package installation functionality.
The PackageInstaller coordinates concurrent builds of packages for the same
Spack instance by leveraging the dependency DAG and file system locks. It
also proceeds with the installation of non-dependent packages of failed
dependencies in order to install as many dependencies of a package as possible.
Bottom-up traversal of the dependency DAG while prioritizing packages with no
uninstalled dependencies allows multiple processes to perform concurrent builds
of separate packages associated with a spec.
File system locks enable coordination such that no two processes attempt to
build the same or a failed dependency package.
Failures to install dependency packages result in removal of their dependents'
build tasks from the current process. A failure file is also written (and
locked) so that other processes can detect the failure and adjust their build
tasks accordingly.
This module supports the coordination of local and distributed concurrent
installations of packages in a Spack instance.
"""
import copy
import glob
import heapq
import itertools
import os
import shutil
import sys
import time
from collections import defaultdict
import six
import llnl.util.filesystem as fs
import llnl.util.lock as lk
import llnl.util.tty as tty
from llnl.util.tty.color import colorize
from llnl.util.tty.log import log_output
import spack.binary_distribution as binary_distribution
import spack.compilers
import spack.error
import spack.hooks
import spack.monitor
import spack.package
import spack.package_prefs as prefs
import spack.repo
import spack.store
import spack.util.executable
from spack.util.environment import EnvironmentModifications, dump_environment
from spack.util.executable import which
from spack.util.timer import Timer
#: Counter to support unique spec sequencing that is used to ensure packages
#: with the same priority are (initially) processed in the order in which they
#: were added (see https://docs.python.org/2/library/heapq.html).
_counter = itertools.count(0)
#: Build status indicating task has been added.
STATUS_ADDED = 'queued'
#: Build status indicating the spec failed to install
STATUS_FAILED = 'failed'
#: Build status indicating the spec is being installed (possibly by another
#: process)
STATUS_INSTALLING = 'installing'
#: Build status indicating the spec was successfully installed
STATUS_INSTALLED = 'installed'
#: Build status indicating the task has been popped from the queue
STATUS_DEQUEUED = 'dequeued'
#: Build status indicating task has been removed (to maintain priority
#: queue invariants).
STATUS_REMOVED = 'removed'
class InstallAction(object):
    """Enumeration of the possible actions to take for an install request."""
    #: Don't perform an install
    NONE = 0
    #: Do a standard install
    INSTALL = 1
    #: Do an overwrite install
    OVERWRITE = 2
def _check_last_phase(pkg):
"""
Ensures the specified package has a valid last phase before proceeding
with its installation.
The last phase is also set to None if it is the last phase of the
package already.
Args:
pkg (spack.package.PackageBase): the package being installed
Raises:
``BadInstallPhase`` if stop_before or last phase is invalid
"""
if pkg.stop_before_phase and pkg.stop_before_phase not in pkg.phases:
raise BadInstallPhase(pkg.name, pkg.stop_before_phase)
if pkg.last_phase and pkg.last_phase not in pkg.phases:
raise BadInstallPhase(pkg.name, pkg.last_phase)
# If we got a last_phase, make sure it's not already last
if pkg.last_phase and pkg.last_phase == pkg.phases[-1]:
pkg.last_phase = None
def _handle_external_and_upstream(pkg, explicit):
    """
    Determine if the package is external or upstream and register it in the
    database if it is external package.

    Args:
        pkg (spack.package.Package): the package whose installation is under
            consideration
        explicit (bool): the package was explicitly requested by the user

    Return:
        bool: ``True`` if the package is external or upstream (so not to
            be installed locally), otherwise, ``False``
    """
    # For external packages the workflow is simplified, and basically
    # consists in module file generation and registration in the DB.
    if pkg.spec.external:
        _process_external_package(pkg, explicit)
        _print_installed_pkg('{0} (external {1})'
                             .format(pkg.prefix, package_id(pkg)))
        return True

    if pkg.installed_upstream:
        tty.verbose('{0} is installed in an upstream Spack instance at {1}'
                    .format(package_id(pkg), pkg.spec.prefix))
        _print_installed_pkg(pkg.prefix)

        # This will result in skipping all post-install hooks. In the case
        # of modules this is considered correct because we want to retrieve
        # the module from the upstream Spack instance.
        return True

    return False
def _do_fake_install(pkg):
    """Populate the install prefix with fake executables, headers and
    libraries so the package appears installed without being built.

    Args:
        pkg (spack.package.PackageBase): package whose prefix is populated
    """
    name = pkg.name

    # Avoid double 'lib' for packages whose names already start with lib
    lib_name = name if name.startswith('lib') else 'lib' + name
    shared_ext = '.dylib' if sys.platform == 'darwin' else '.so'

    # Install fake command
    fs.mkdirp(pkg.prefix.bin)
    fake_command = os.path.join(pkg.prefix.bin, name)
    fs.touch(fake_command)
    if sys.platform != 'win32':
        # chmod is not available on Windows; skip marking executable there
        which('chmod')('+x', fake_command)

    # Install fake header file
    fs.mkdirp(pkg.prefix.include)
    fs.touch(os.path.join(pkg.prefix.include, name + '.h'))

    # Install fake shared and static libraries
    fs.mkdirp(pkg.prefix.lib)
    for ext in (shared_ext, '.a'):
        fs.touch(os.path.join(pkg.prefix.lib, lib_name + ext))

    # Install fake man page directory
    fs.mkdirp(pkg.prefix.man.man1)

    # Snapshot the package provenance alongside the fake install
    packages_dir = spack.store.layout.build_packages_path(pkg.spec)
    dump_packages(pkg.spec, packages_dir)
def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
    """
    Return a list of packages required to bootstrap `pkg`s compiler

    Checks Spack's compiler configuration for a compiler that
    matches the package spec.

    Args:
        compiler (CompilerSpec): the compiler to bootstrap
        architecture (ArchSpec): the architecture for which to bootstrap the
            compiler
        pkgs (list): the packages that may need their compiler
            installed

    Return:
        list: list of tuples, (PackageBase, bool), for concretized compiler-related
            packages that need to be installed and bool values specify whether the
            package is the bootstrap compiler (``True``) or one of its dependencies
            (``False``).  The list will be empty if there are no compilers.
    """
    tty.debug('Bootstrapping {0} compiler'.format(compiler))
    compilers = spack.compilers.compilers_for_spec(
        compiler, arch_spec=architecture)
    # A matching compiler is already configured: nothing to bootstrap.
    if compilers:
        return []

    dep = spack.compilers.pkg_spec_for_compiler(compiler)

    # Set the architecture for the compiler package in a way that allows the
    # concretizer to back off if needed for the older bootstrapping compiler
    dep.constrain('platform=%s' % str(architecture.platform))
    dep.constrain('os=%s' % str(architecture.os))
    dep.constrain('target=%s:' %
                  architecture.target.microarchitecture.family.name)
    # concrete CompilerSpec has less info than concrete Spec
    # concretize as Spec to add that information
    dep.concretize()

    # mark compiler as depended-on by the packages that use it
    # NOTE(review): relies on ``spack.spec`` being bound as an attribute via
    # some other module's import side effects -- this file does not import
    # spack.spec directly; confirm.
    for pkg in pkgs:
        dep._dependents.add(
            spack.spec.DependencySpec(pkg.spec, dep, ('build',))
        )

    # Post-order traversal yields the compiler's dependencies before the
    # compiler package itself (appended last, flagged True).
    packages = [(s.package, False) for
                s in dep.traverse(order='post', root=False)]
    packages.append((dep.package, True))
    return packages
def _hms(seconds):
"""
Convert seconds to hours, minutes, seconds
Args:
seconds (int): time to be converted in seconds
Return:
(str) String representation of the time as #h #m #.##s
"""
m, s = divmod(seconds, 60)
h, m = divmod(m, 60)
parts = []
if h:
parts.append("%dh" % h)
if m:
parts.append("%dm" % m)
if s:
parts.append("%.2fs" % s)
return ' '.join(parts)
def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
                        full_hash_match=False):
    """
    Extract the package from binary cache

    Args:
        pkg (spack.package.PackageBase): the package to install from the binary cache
        cache_only (bool): only extract from binary cache
        explicit (bool): ``True`` if installing the package was explicitly
            requested by the user, otherwise, ``False``
        unsigned (bool): ``True`` if binary package signatures to be checked,
            otherwise, ``False``
        full_hash_match (bool): require a full-hash match on candidate specs

    Return:
        bool: ``True`` if the package was extract from binary cache,
            ``False`` otherwise
    """
    extracted = _try_install_from_binary_cache(
        pkg, explicit, unsigned=unsigned, full_hash_match=full_hash_match)
    pkg_id = package_id(pkg)

    if not extracted:
        # Fall back to a source build -- unless the caller insisted on the
        # cache, in which case this is fatal.
        prefix = 'No binary for {0} found'.format(pkg_id)
        if cache_only:
            tty.die('{0} when cache-only specified'.format(prefix))

        tty.msg('{0}: installing from source'.format(prefix))
        return False

    tty.debug('Successfully extracted {0} from binary cache'.format(pkg_id))
    _print_installed_pkg(pkg.spec.prefix)
    spack.hooks.post_install(pkg.spec)
    return True
def _print_installed_pkg(message):
    """
    Output a message with a package icon.

    Args:
        message (str): message to be output
    """
    # Green "[+]" icon mirrors the marker used by `spack find`.
    icon = colorize('@*g{[+]} ')
    print(icon + message)
def _process_external_package(pkg, explicit):
    """
    Helper function to run post install hooks and register external packages.

    Args:
        pkg (Package): the external package
        explicit (bool): if the package was requested explicitly by the user,
            ``False`` if it was pulled in as a dependency of an explicit
            package.
    """
    assert pkg.spec.external, \
        'Expected to post-install/register an external package.'

    pre = '{s.name}@{s.version} :'.format(s=pkg.spec)
    spec = pkg.spec

    if spec.external_modules:
        tty.msg('{0} has external module in {1}'
                .format(pre, spec.external_modules))
        tty.debug('{0} is actually installed in {1}'
                  .format(pre, spec.external_path))
    else:
        tty.debug('{0} externally installed in {1}'
                  .format(pre, spec.external_path))

    try:
        # Check if the package was already registered in the DB.
        # If this is the case, then just exit.
        # BUG FIX: this lookup is what raises KeyError for unregistered
        # specs; without it the ``except KeyError`` branch below was
        # unreachable and external packages were never registered.
        spack.store.db.get_record(spec)
        tty.debug('{0} already registered in DB'.format(pre))

        # Update the explicit state if it is necessary
        if explicit:
            spack.store.db.update_explicit(spec, explicit)

    except KeyError:
        # If not, register it and generate the module file.
        # For external packages we just need to run
        # post-install hooks to generate module files.
        tty.debug('{0} generating module file'.format(pre))
        spack.hooks.post_install(spec)

        # Add to the DB
        tty.debug('{0} registering into DB'.format(pre))
        spack.store.db.add(spec, None, explicit=explicit)
def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
                                  preferred_mirrors=None):
    """
    Process the binary cache tarball.

    Args:
        pkg (spack.package.PackageBase): the package being installed
        binary_spec (spack.spec.Spec): the spec whose cache has been confirmed
        explicit (bool): the package was explicitly requested by the user
        unsigned (bool): ``True`` if binary package signatures to be checked,
            otherwise, ``False``
        preferred_mirrors (list): Optional list of urls to prefer when
            attempting to download the tarball

    Return:
        bool: ``True`` if the package was extracted from binary cache,
            else ``False``
    """
    tarball = binary_distribution.download_tarball(
        binary_spec, preferred_mirrors=preferred_mirrors)

    # see #10063 : install from source if tarball doesn't exist
    if tarball is None:
        tty.msg('{0} exists in binary cache but with different hash'
                .format(pkg.name))
        return False

    pkg_id = package_id(pkg)
    tty.msg('Extracting {0} from binary cache'.format(pkg_id))

    # don't print long padded paths while extracting/relocating binaries
    # NOTE(review): depends on ``spack.util.path`` being importable as an
    # attribute of the already-imported ``spack.util`` package -- this file
    # only imports spack.util.executable directly; confirm.
    with spack.util.path.filter_padding():
        binary_distribution.extract_tarball(
            binary_spec, tarball, allow_root=False, unsigned=unsigned, force=False
        )

    # Record the successful extraction in the database.
    pkg.installed_from_binary_cache = True
    spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
    return True
def _try_install_from_binary_cache(pkg, explicit, unsigned=False,
                                   full_hash_match=False):
    """
    Try to extract the package from binary cache.

    Args:
        pkg (spack.package.PackageBase): the package to be extracted from binary cache
        explicit (bool): the package was explicitly requested by the user
        unsigned (bool): ``True`` if binary package signatures to be checked,
            otherwise, ``False``
        full_hash_match (bool): require a full-hash match when searching
            mirrors for the spec

    Return:
        bool: ``True`` if the package was extracted, else ``False``
    """
    pkg_id = package_id(pkg)
    tty.debug('Searching for binary cache of {0}'.format(pkg_id))

    matches = binary_distribution.get_mirrors_for_spec(
        pkg.spec, full_hash_match=full_hash_match)
    if not matches:
        return False

    # In the absence of guidance from user or some other reason to prefer one
    # mirror over another, any match will suffice, so just pick the first one.
    mirror_urls = [match['mirror_url'] for match in matches]
    return _process_binary_cache_tarball(
        pkg, matches[0]['spec'], explicit, unsigned,
        preferred_mirrors=mirror_urls)
def clear_failures():
    """
    Remove all failure tracking markers for the Spack instance.
    """
    # Delegates entirely to the database, which owns the failure markers.
    spack.store.db.clear_all_failures()
def combine_phase_logs(phase_log_files, log_path):
    """
    Concatenate a collection of per-phase logs into one combined log file.

    Each phase produces its own log, so this function cats all the separate
    phase log output files into the pkg.log_path.  It is written generally
    to accept some list of files, and a log path to combine them to.

    Args:
        phase_log_files (list): a list or iterator of logs to combine
        log_path (str): the path to combine them to
    """
    with open(log_path, 'w') as combined:
        for source_path in phase_log_files:
            with open(source_path, 'r') as source:
                # Stream each phase log into the combined file in order.
                shutil.copyfileobj(source, combined)
def dump_packages(spec, path):
    """
    Dump all package information for a spec and its dependencies.

    This creates a package repository within path for every namespace in the
    spec DAG, and fills the repos with package files and patch files for every
    node in the DAG.

    Args:
        spec (spack.spec.Spec): the Spack spec whose package information is to be dumped
        path (str): the path to the build packages directory
    """
    fs.mkdirp(path)

    # Copy in package.py files from any dependencies.
    # Note that we copy them in as they are in the *install* directory
    # NOT as they are in the repository, because we want a snapshot of
    # how *this* particular build was done.
    for node in spec.traverse(deptype=all):
        if node is not spec:
            # Locate the dependency package in the install tree and find
            # its provenance information.
            source = spack.store.layout.build_packages_path(node)
            source_repo_root = os.path.join(source, node.namespace)

            # If there's no provenance installed for the package, skip it.
            # If it's external, skip it because it either:
            # 1) it wasn't built with Spack, so it has no Spack metadata
            # 2) it was built by another Spack instance, and we do not
            # (currently) use Spack metadata to associate repos with externals
            # built by other Spack instances.
            # Spack can always get something current from the builtin repo.
            if node.external or not os.path.isdir(source_repo_root):
                continue

            # Create a source repo and get the pkg directory out of it.
            try:
                source_repo = spack.repo.Repo(source_repo_root)
                source_pkg_dir = source_repo.dirname_for_package_name(
                    node.name)
            except spack.repo.RepoError as err:
                tty.debug('Failed to create source repo for {0}: {1}'
                          .format(node.name, str(err)))
                source_pkg_dir = None
                tty.warn("Warning: Couldn't copy in provenance for {0}"
                         .format(node.name))

        # Create a destination repository
        dest_repo_root = os.path.join(path, node.namespace)
        if not os.path.exists(dest_repo_root):
            spack.repo.create_repo(dest_repo_root)
        repo = spack.repo.Repo(dest_repo_root)

        # Get the location of the package in the dest repo.
        dest_pkg_dir = repo.dirname_for_package_name(node.name)
        # The root spec's provenance comes from the live repo path; a
        # dependency's comes from its installed metadata (when available).
        if node is spec:
            spack.repo.path.dump_provenance(node, dest_pkg_dir)
        elif source_pkg_dir:
            fs.install_tree(source_pkg_dir, dest_pkg_dir)
def get_dependent_ids(spec):
    """
    Return a list of package ids for the spec's dependents

    Args:
        spec (spack.spec.Spec): Concretized spec

    Returns:
        list: list of package ids
    """
    return [package_id(dependent.package)
            for dependent in spec.dependents()]
def install_msg(name, pid):
    """
    Colorize the name/id of the package being installed

    Args:
        name (str): Name/id of the package being installed
        pid (int): id of the installer process

    Return:
        str: Colorized installing message
    """
    # Only prefix with the pid when pids are shown (distributed builds).
    prefix = '{0}: '.format(pid) if tty.show_pid() else ''
    return prefix + colorize('@*{Installing} @*g{%s}' % name)
def log(pkg):
    """
    Copy provenance into the install directory on success

    Archives the build log, per-phase logs, environment modifications,
    configure args, any package-specified archive files, and the package
    repository snapshot into the spec's metadata directory.

    Args:
        pkg (spack.package.Package): the package that was built and installed
    """
    packages_dir = spack.store.layout.build_packages_path(pkg.spec)

    # Remove first if we're overwriting another build
    try:
        # log and env install paths are inside this
        shutil.rmtree(packages_dir)
    except Exception as e:
        # FIXME : this potentially catches too many things...
        tty.debug(e)

    # Archive the whole stdout + stderr for the package
    fs.install(pkg.log_path, pkg.install_log_path)

    # Archive all phase log paths
    for phase_log in pkg.phase_log_files:
        log_file = os.path.basename(phase_log)
        log_file = os.path.join(os.path.dirname(packages_dir), log_file)
        fs.install(phase_log, log_file)

    # Archive the environment modifications for the build.
    fs.install(pkg.env_mods_path, pkg.install_env_path)

    if os.path.exists(pkg.configure_args_path):
        # Archive the args used for the build
        fs.install(pkg.configure_args_path, pkg.install_configure_args_path)

    # Finally, archive files that are specific to each package
    with fs.working_dir(pkg.stage.path):
        errors = six.StringIO()
        target_dir = os.path.join(
            spack.store.layout.metadata_path(pkg.spec), 'archived-files')

        for glob_expr in pkg.archive_files:
            # Check that we are trying to copy things that are
            # in the stage tree (not arbitrary files)
            abs_expr = os.path.realpath(glob_expr)
            if os.path.realpath(pkg.stage.path) not in abs_expr:
                errors.write('[OUTSIDE SOURCE PATH]: {0}\n'.format(glob_expr))
                continue
            # Now that we are sure that the path is within the correct
            # folder, make it relative and check for matches
            if os.path.isabs(glob_expr):
                glob_expr = os.path.relpath(glob_expr, pkg.stage.path)
            files = glob.glob(glob_expr)
            for f in files:
                try:
                    target = os.path.join(target_dir, f)
                    # We must ensure that the directory exists before
                    # copying a file in
                    fs.mkdirp(os.path.dirname(target))
                    fs.install(f, target)
                except Exception as e:
                    tty.debug(e)

                    # Here try to be conservative, and avoid discarding
                    # the whole install procedure because of copying a
                    # single file failed
                    errors.write('[FAILED TO ARCHIVE]: {0}'.format(f))

        # Any recorded problems are written alongside the archive so the
        # user can inspect what was skipped.
        if errors.getvalue():
            error_file = os.path.join(target_dir, 'errors.txt')
            fs.mkdirp(target_dir)
            with open(error_file, 'w') as err:
                err.write(errors.getvalue())
            tty.warn('Errors occurred when archiving files.\n\t'
                     'See: {0}'.format(error_file))

    dump_packages(pkg.spec, packages_dir)
def package_id(pkg):
    """A "unique" package identifier for installation purposes

    The identifier is used to track build tasks, locks, install, and
    failure statuses.

    The identifier needs to distinguish between combinations of compilers
    and packages for combinatorial environments.

    Args:
        pkg (spack.package.PackageBase): the package from which the identifier is
            derived

    Raises:
        ValueError: if the package's spec is not concrete
    """
    spec = pkg.spec
    if not spec.concrete:
        raise ValueError("Cannot provide a unique, readable id when "
                         "the spec is not concretized.")

    # name-version-hash uniquely identifies a concrete build.
    return "-".join(["{0}".format(pkg.name),
                     "{0}".format(pkg.version),
                     spec.dag_hash()])
class TermTitle(object):
    """Track install progress and mirror it into the terminal title bar."""

    def __init__(self, pkg_count):
        # Counters used for showing status information in the terminal title
        self.pkg_num = 0            # distinct packages seen so far
        self.pkg_count = pkg_count  # total packages expected
        self.pkg_ids = set()        # ids already counted (avoid duplicates)

    def next_pkg(self, pkg):
        """Advance the progress counter when *pkg* has not been seen yet."""
        pkg_id = package_id(pkg)
        if pkg_id in self.pkg_ids:
            return
        self.pkg_ids.add(pkg_id)
        self.pkg_num += 1

    def set(self, text):
        """Write *text* plus a progress fraction into the terminal title.

        No-op unless the user enabled ``config:terminal_title`` and stdout
        is an interactive terminal.
        """
        if not spack.config.get('config:terminal_title', False):
            return

        if not sys.stdout.isatty():
            return

        status = '{0} [{1}/{2}]'.format(text, self.pkg_num, self.pkg_count)
        sys.stdout.write('\033]0;Spack: {0}\007'.format(status))
        sys.stdout.flush()
class TermStatusLine(object):
    """
    This class is used in distributed builds to inform the user that other packages are
    being installed by another process.
    """

    def __init__(self, enabled):
        self.enabled = enabled  # when False, add/clear are no-ops
        self.pkg_set = set()    # fast membership test for queued ids
        self.pkg_list = []      # insertion order; one terminal line each

    def add(self, pkg_id):
        """Record *pkg_id* on the waiting list and emit a status line for it."""
        if not self.enabled:
            return
        if pkg_id in self.pkg_set:
            return

        self.pkg_set.add(pkg_id)
        self.pkg_list.append(pkg_id)
        tty.msg(colorize('@*{Waiting for} @*g{%s}' % pkg_id))
        sys.stdout.flush()

    def clear(self):
        """Erase every "Waiting for" line from the terminal and reset state."""
        if not (self.enabled and self.pkg_list):
            return

        count = len(self.pkg_list)
        self.pkg_set.clear()
        self.pkg_list = []

        # Move the cursor to the beginning of the first "Waiting for" message and clear
        # everything after it.
        sys.stdout.write('\x1b[%sF\x1b[J' % count)
        sys.stdout.flush()
class PackageInstaller(object):
'''
Class for managing the install process for a Spack instance based on a
bottom-up DAG approach.
This installer can coordinate concurrent batch and interactive, local
and distributed (on a shared file system) builds for the same Spack
instance.
'''
def __init__(self, installs=None):
    """Initialize the installer.

    Args:
        installs (list): list of tuples, where each
            tuple consists of a package (PackageBase) and its associated
            install arguments (dict); ``None`` (the default) is treated
            as an empty list

    Return:
        PackageInstaller: instance
    """
    # BUG FIX: the default used to be a mutable ``[]``, which Python shares
    # between calls; ``None`` avoids that pitfall without changing behavior
    # for any caller.
    installs = installs or []

    # List of build requests
    self.build_requests = [BuildRequest(pkg, install_args)
                           for pkg, install_args in installs]

    # Priority queue of build tasks
    self.build_pq = []

    # Mapping of unique package ids to build task
    self.build_tasks = {}

    # Cache of package locks for failed packages, keyed on package's ids
    self.failed = {}

    # Cache the PID for distributed build messaging
    self.pid = os.getpid()

    # Cache of installed packages' unique ids
    self.installed = set()

    # Data store layout
    self.layout = spack.store.layout

    # Locks on specs being built, keyed on the package's unique id
    self.locks = {}

    # Cache fail_fast option to ensure if one build request asks to fail
    # fast then that option applies to all build requests.
    self.fail_fast = False
def __repr__(self):
"""Returns a formal representation of the package installer."""
rep = '{0}('.format(self.__class__.__name__)
for attr, value in self.__dict__.items():
rep += '{0}={1}, '.format(attr, value.__repr__())
return '{0})'.format(rep.strip(', '))
def __str__(self):
"""Returns a printable version of the package installer."""
requests = '#requests={0}'.format(len(self.build_requests))
tasks = '#tasks={0}'.format(len(self.build_tasks))
failed = 'failed ({0}) = {1}'.format(len(self.failed), self.failed)
installed = 'installed ({0}) = {1}'.format(
len(self.installed), self.installed)
return '{0}: {1}; {2}; {3}; {4}'.format(
self.pid, requests, tasks, installed, failed)
def _add_bootstrap_compilers(
        self, compiler, architecture, pkgs, request, all_deps):
    """
    Add bootstrap compilers and dependencies to the build queue.

    Args:
        compiler: the compiler to bootstrap
        architecture: the architecture for which to bootstrap the compiler
        pkgs (spack.package.PackageBase): the package with possible compiler
            dependencies
        request (BuildRequest): the associated install request
        all_deps (defaultdict(set)): dictionary of all dependencies and
            associated dependents
    """
    needed = _packages_needed_to_bootstrap_compiler(
        compiler, architecture, pkgs)
    for comp_pkg, is_compiler in needed:
        # Skip any compiler-related package that already has a task.
        if package_id(comp_pkg) in self.build_tasks:
            continue
        self._add_init_task(comp_pkg, request, is_compiler, all_deps)
def _add_init_task(self, pkg, request, is_compiler, all_deps):
    """
    Creates and queues the initial build task for the package.

    Args:
        pkg (spack.package.Package): the package to be built and installed
        request (BuildRequest or None): the associated install request
            where ``None`` can be used to indicate the package was
            explicitly requested by the user
        is_compiler (bool): whether task is for a bootstrap compiler
        all_deps (defaultdict(set)): dictionary of all dependencies and
            associated dependents
    """
    # NOTE(review): the two zero positional arguments are BuildTask
    # parameters (presumably initial attempt/priority counters) -- confirm
    # against BuildTask's signature.
    task = BuildTask(pkg, request, is_compiler, 0, 0, STATUS_ADDED,
                     self.installed)
    # Record this package as a dependent of each of its dependencies so
    # that dependency failures can later be propagated to it.
    for dep_id in task.dependencies:
        all_deps[dep_id].add(package_id(pkg))

    self._push_task(task)
def _check_db(self, spec):
    """Determine if the spec is flagged as installed in the database

    Args:
        spec (spack.spec.Spec): spec whose database install status is being checked

    Return:
        (rec, installed_in_db) tuple where rec is the database record, or
            None, if there is no matching spec, and installed_in_db is
            ``True`` if the spec is considered installed and ``False``
            otherwise
    """
    try:
        record = spack.store.db.get_record(spec)
    except KeyError:
        # KeyError is raised if there is no matching spec in the database
        # (versus no matching specs that are installed).
        return None, False

    return record, (record.installed if record else False)
def _check_deps_status(self, request):
    """Check the install status of the requested package

    Args:
        request (BuildRequest): the associated install request

    Raises:
        InstallError: if a dependency is marked as an install failure or
            is write locked by another process
    """
    err = 'Cannot proceed with {0}: {1}'
    for dep in request.traverse_dependencies():
        dep_pkg = dep.package
        dep_id = package_id(dep_pkg)

        # Check for failure since a prefix lock is not required
        if spack.store.db.prefix_failed(dep):
            action = "'spack install' the dependency"
            msg = '{0} is marked as an install failure: {1}' \
                .format(dep_id, action)
            raise InstallError(err.format(request.pkg_id, msg))

        # Attempt to get a read lock to ensure another process does not
        # uninstall the dependency while the requested spec is being
        # installed
        ltype, lock = self._ensure_locked('read', dep_pkg)
        if lock is None:
            msg = '{0} is write locked by another process'.format(dep_id)
            raise InstallError(err.format(request.pkg_id, msg))

        # Flag external and upstream packages as being installed
        if dep_pkg.spec.external or dep_pkg.installed_upstream:
            self._flag_installed(dep_pkg)
            continue

        # Check the database to see if the dependency has been installed
        # and flag as such if appropriate
        rec, installed_in_db = self._check_db(dep)
        if installed_in_db and (
                dep.dag_hash() not in request.overwrite or
                rec.installation_time > request.overwrite_time):
            tty.debug('Flagging {0} as installed per the database'
                      .format(dep_id))
            self._flag_installed(dep_pkg)
        else:
            # Not treated as installed: drop the read lock taken above so
            # it does not linger for a dependency we will build.
            lock.release_read()
def _prepare_for_install(self, task):
    """
    Check the database and leftover installation directories/files and
    prepare for a new install attempt for an uninstalled package.

    Preparation includes cleaning up installation and stage directories
    and ensuring the database is up-to-date.

    Args:
        task (BuildTask): the build task whose associated package is
            being checked
    """
    install_args = task.request.install_args
    keep_prefix = install_args.get('keep_prefix')
    keep_stage = install_args.get('keep_stage')
    restage = install_args.get('restage')

    # Make sure the package is ready to be locally installed.
    self._ensure_install_ready(task.pkg)

    # Skip file system operations if we've already gone through them for
    # this spec.
    if task.pkg_id in self.installed:
        # Already determined the spec has been installed
        return

    # Determine if the spec is flagged as installed in the database
    rec, installed_in_db = self._check_db(task.pkg.spec)

    if not installed_in_db:
        # Ensure there is no other installed spec with the same prefix dir
        if spack.store.db.is_occupied_install_prefix(task.pkg.spec.prefix):
            raise InstallError(
                "Install prefix collision for {0}".format(task.pkg_id),
                long_msg="Prefix directory {0} already used by another "
                         "installed spec.".format(task.pkg.spec.prefix))

        # Make sure the installation directory is in the desired state
        # for uninstalled specs.
        if os.path.isdir(task.pkg.spec.prefix):
            if not keep_prefix:
                task.pkg.remove_prefix()
            else:
                tty.debug('{0} is partially installed'.format(task.pkg_id))

    # Destroy the stage for a locally installed, non-DIYStage, package
    if restage and task.pkg.stage.managed_by_spack:
        task.pkg.stage.destroy()

    # An overwrite install replaces the DB record only when it is newer
    # than the overwrite request; otherwise the record counts as installed.
    if installed_in_db and (
            rec.spec.dag_hash() not in task.request.overwrite or
            rec.installation_time > task.request.overwrite_time
    ):
        self._update_installed(task)

        # Only update the explicit entry once for the explicit package
        if task.explicit:
            spack.store.db.update_explicit(task.pkg.spec, True)

        # In case the stage directory has already been created, this
        # check ensures it is removed after we checked that the spec is
        # installed.
        if not keep_stage:
            task.pkg.stage.destroy()
def _cleanup_all_tasks(self):
    """Cleanup all build tasks to include releasing their locks."""
    # Release every spec lock still held by this process.
    for pkg_id in self.locks:
        self._release_lock(pkg_id)

    # Drop any failure markers this process owns.
    for pkg_id in self.failed:
        self._cleanup_failed(pkg_id)

    # Remove remaining tasks; snapshot the keys first because
    # _remove_task mutates the mapping while we iterate.
    for pkg_id in list(self.build_tasks):
        try:
            self._remove_task(pkg_id)
        except Exception:
            # Best effort: one stubborn task should not block cleanup of
            # the rest.
            pass
def _cleanup_failed(self, pkg_id):
"""
Cleanup any failed markers for the package
Args:
pkg_id (str): identifier for the failed package
"""
lock = self.failed.get(pkg_id, None)
if lock is not None:
err = "{0} exception when removing failure tracking for {1}: {2}"
msg = 'Removing failure mark on {0}'
try:
tty.verbose(msg.format(pkg_id))
lock.release_write()
except Exception as exc:
tty.warn(err.format(exc.__class__.__name__, pkg_id, str(exc)))
def _cleanup_task(self, pkg):
    """
    Cleanup the build task for the spec

    Args:
        pkg (spack.package.PackageBase): the package being installed
    """
    pkg_id = package_id(pkg)
    self._remove_task(pkg_id)

    # Keep a read lock so no other process can uninstall this spec while
    # our own installation of it is still in progress.
    self._ensure_locked('read', pkg)
def _ensure_install_ready(self, pkg):
"""
Ensure the package is ready to install locally, which includes
already locked.
Args:
pkg (spack.package.PackageBase): the package being locally installed
"""
pkg_id = package_id(pkg)
pre = "{0} cannot be installed locally:".format(pkg_id)
# External packages cannot be installed locally.
if pkg.spec.external:
raise ExternalPackageError('{0} {1}'.format(pre, 'is external'))
# Upstream packages cannot be installed locally.
if pkg.installed_upstream:
raise UpstreamPackageError('{0} {1}'.format(pre, 'is upstream'))