-
Notifications
You must be signed in to change notification settings - Fork 10.4k
/
Copy pathshell.py
250 lines (205 loc) · 6.56 KB
/
shell.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
# swift_build_support/shell.py ----------------------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
# ----------------------------------------------------------------------------
"""
Centralized command line and file system interface for the build script.
"""
# ----------------------------------------------------------------------------
from __future__ import print_function
import os
import pipes
import shutil
import subprocess
import sys
from contextlib import contextmanager
from multiprocessing import Lock, Pool, cpu_count
from . import diagnostics
# `subprocess.DEVNULL` only exists on Python 3.3+; on older interpreters fall
# back to PIPE so the constant is always defined and safe to pass as a stream.
DEVNULL = getattr(subprocess, 'DEVNULL', subprocess.PIPE)
# Module-wide default for the `dry_run` keyword accepted by the helpers below;
# `_coerce_dry_run` returns this value whenever a call passes dry_run=None.
dry_run = False
def _quote(arg):
return pipes.quote(str(arg))
def quote_command(args):
    """
    quote_command(args) -> str

    Quote the command for passing to a shell.
    """
    return ' '.join(_quote(arg) for arg in args)
def _coerce_dry_run(dry_run_override):
if dry_run_override is None:
return dry_run
else:
return dry_run_override
def _echo_command(dry_run, command, env=None, prompt="+ "):
output = []
if env is not None:
output += ['env'] + [_quote("%s=%s" % (k, v))
for (k, v) in sorted(env.items())]
output += [_quote(arg) for arg in command]
file = sys.stderr
if dry_run:
file = sys.stdout
print(prompt + ' '.join(output), file=file)
file.flush()
def call(command, stderr=None, env=None, dry_run=None, echo=True):
    """
    call(command, ...) -> None

    Execute the given command, echoing it first (unless echo=False).

    `env`, when given, is merged on top of `os.environ` rather than replacing
    it.  Failures are reported via `diagnostics.fatal`, which aborts the
    build, instead of propagating an exception to the caller.
    """
    is_dry_run = _coerce_dry_run(dry_run)
    if is_dry_run or echo:
        _echo_command(is_dry_run, command, env=env)
    if is_dry_run:
        return

    merged_env = None
    if env is not None:
        merged_env = dict(os.environ)
        merged_env.update(env)

    try:
        subprocess.check_call(command, env=merged_env, stderr=stderr)
    except subprocess.CalledProcessError as e:
        diagnostics.fatal(
            "command terminated with a non-zero exit status " +
            str(e.returncode) + ", aborting")
    except OSError as e:
        diagnostics.fatal(
            "could not execute '" + quote_command(command) +
            "': " + e.strerror)
def capture(command, stderr=None, env=None, dry_run=None, echo=True,
            optional=False, allow_non_zero_exit=False):
    """
    capture(command, ...) -> str

    Execute the given command and return its standard output as `str`.

    `env`, when given, is merged on top of `os.environ` rather than
    replacing it.  Returns None for a dry run, or when the command fails
    and `optional` is set.  With `allow_non_zero_exit`, a non-zero exit
    still returns the captured output.  Any other failure aborts the build
    via `diagnostics.fatal`.
    """
    dry_run = _coerce_dry_run(dry_run)
    if dry_run or echo:
        _echo_command(dry_run, command, env=env)
    if dry_run:
        return
    _env = None
    if env is not None:
        _env = dict(os.environ)
        _env.update(env)
    try:
        out = subprocess.check_output(command, env=_env, stderr=stderr)
        # Coerce to `str` hack. not py3 `byte`, not py2 `unicode`.
        return str(out.decode())
    except subprocess.CalledProcessError as e:
        if allow_non_zero_exit:
            # Decode just like the success path; previously this leaked raw
            # `bytes` on Python 3, giving callers an inconsistent type.
            return str(e.output.decode())
        if optional:
            return None
        diagnostics.fatal(
            "command terminated with a non-zero exit status " +
            str(e.returncode) + ", aborting")
    except OSError as e:
        if optional:
            return None
        diagnostics.fatal(
            "could not execute '" + quote_command(command) +
            "': " + e.strerror)
@contextmanager
def pushd(path, dry_run=None, echo=True):
    """Context manager that chdirs into `path` and restores the old cwd.

    Echoes shell-style `pushd`/`popd` commands.  The restore is done in a
    `finally` block so the working directory is put back even when the
    `with` body raises; previously an exception left the process stranded
    in `path`.
    """
    dry_run = _coerce_dry_run(dry_run)
    old_dir = os.getcwd()
    if dry_run or echo:
        _echo_command(dry_run, ["pushd", path])
    if not dry_run:
        os.chdir(path)
    try:
        yield
    finally:
        if dry_run or echo:
            _echo_command(dry_run, ["popd"])
        if not dry_run:
            os.chdir(old_dir)
def makedirs(path, dry_run=None, echo=True):
    """Create `path` and any missing parents, like `mkdir -p`.

    Does nothing (beyond echoing) if the directory already exists.
    """
    dry_run = _coerce_dry_run(dry_run)
    if dry_run or echo:
        _echo_command(dry_run, ['mkdir', '-p', path])
    if dry_run:
        return
    if os.path.isdir(path):
        return
    os.makedirs(path)
def rmtree(path, dry_run=None, echo=True):
    """Remove the directory tree rooted at `path`, like `rm -rf`.

    Missing paths are silently ignored.
    """
    dry_run = _coerce_dry_run(dry_run)
    if dry_run or echo:
        _echo_command(dry_run, ['rm', '-rf', path])
    if dry_run:
        return
    if not os.path.exists(path):
        return
    shutil.rmtree(path)
def copytree(src, dest, dry_run=None, echo=True):
    """Recursively copy `src` to `dest`, like `cp -r`.

    `dest` must not already exist (`shutil.copytree` semantics).
    """
    skip = _coerce_dry_run(dry_run)
    if skip or echo:
        _echo_command(skip, ['cp', '-r', src, dest])
    if not skip:
        shutil.copytree(src, dest)
# Initialized later
lock = None


def run(*args, **kwargs):
    """Run a command via `subprocess.Popen`, capturing stdout and stderr.

    Returns a `(stdout, 0, args)` tuple on success.  On a non-zero exit
    status, raises a bare `Exception` carrying `ret`, `args`, `repo_path`
    and `stderr` attributes for the caller to report.  When `echo` is set,
    output printing is serialized through the module-level `lock` installed
    by `init` (used by `run_parallel` workers).
    """
    repo_path = os.getcwd()
    echo_output = kwargs.pop('echo', False)
    dry_run = kwargs.pop('dry_run', False)
    env = kwargs.pop('env', None)

    if dry_run:
        _echo_command(dry_run, *args, env=env)
        return (None, 0, args)

    process = subprocess.Popen(
        *args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
    stdout, stderr = process.communicate()
    ret = process.wait()

    if lock:
        lock.acquire()
    if echo_output:
        print(repo_path)
        _echo_command(dry_run, *args, env=env)
        if stdout:
            print(stdout, end="")
        if stderr:
            print(stderr, end="")
        print()
    if lock:
        lock.release()

    if ret != 0:
        failure = Exception()
        failure.ret = ret
        failure.args = args
        failure.repo_path = repo_path
        failure.stderr = stderr
        raise failure
    return (stdout, 0, args)
def init(l):
    """Process-pool initializer: install the shared lock that `run` uses
    to serialize echoed output across worker processes."""
    global lock
    lock = l
def run_parallel(fn, pool_args, n_processes=0):
    """Map `fn` over `pool_args` with a multiprocessing pool; return results.

    When `n_processes` is 0, twice the CPU count is used.  Each worker is
    initialized with a shared lock (via `init`) so `run` can serialize its
    echoed output.
    """
    if n_processes == 0:
        n_processes = cpu_count() * 2

    shared_lock = Lock()
    print("Running ``%s`` with up to %d processes." %
          (fn.__name__, n_processes))
    pool = Pool(processes=n_processes, initializer=init,
                initargs=(shared_lock,))
    # map_async + get(timeout) instead of plain map() keeps the parent
    # responsive to KeyboardInterrupt while workers run.
    results = pool.map_async(func=fn, iterable=pool_args).get(999999)
    pool.close()
    pool.join()
    return results
def check_parallel_results(results, op):
    """Report failures collected from `run_parallel`; return the fail count.

    `results` holds None for each success and an exception-like object
    (with `repo_path`, `ret` and `stderr` attributes) for each failure.
    A None `results` counts as zero failures.
    """
    if results is None:
        return 0
    fail_count = 0
    for result in results:
        if result is None:
            continue
        # Emit the section header once, before the first failure.
        if fail_count == 0:
            print("======%s FAILURES======" % op)
        print("%s failed (ret=%d): %s" % (result.repo_path, result.ret,
                                          result))
        fail_count += 1
        if result.stderr:
            print(result.stderr)
    return fail_count