Mirror of https://github.com/apple/swift.git, synced 2025-12-14 20:36:38 +01:00
[NFC] Python Lint: Fix E275 (missing whitespace after keyword) issues.
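For context: E275 is the pycodestyle check for a missing space after a keyword. 'assert(x)' is the assert statement applied to a parenthesized expression, not a function call, so the old spelling worked but read misleadingly, and it hides a classic trap: once a message is added, the parentheses create a two-element tuple, which is always truthy. A minimal sketch of the pitfall and of the fixed spelling used throughout this commit (the condition and message below are illustrative, not taken from the diff):

    lines = ["build/out/a.swift\n"]

    # Old style, flagged by E275: no whitespace after the 'assert' keyword.
    assert(lines[0].endswith("/a.swift\n"))

    # The latent hazard: with a message, the parentheses form a
    # (condition, message) tuple, and a non-empty tuple is always true,
    # so the check below could never fail. CPython warns about this:
    # "SyntaxWarning: assertion is always true, perhaps remove parentheses?"
    # assert(lines[0].endswith("/a.swift\n"), "unexpected filelist entry")

    # Fixed style, as applied in this commit: keyword, space, expression.
    assert lines[0].endswith("/a.swift\n"), "unexpected filelist entry"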
@@ -28,12 +28,12 @@ filelistFile = sys.argv[sys.argv.index('-filelist') + 1]
 
 with open(filelistFile, 'r') as f:
     lines = f.readlines()
-    assert(lines[0].endswith("/a.swift\n") or
-           lines[0].endswith("/a.swiftmodule\n"))
-    assert(lines[1].endswith("/b.swift\n") or
-           lines[1].endswith("/b.swiftmodule\n"))
-    assert(lines[2].endswith("/c.swift\n") or
-           lines[2].endswith("/c.swiftmodule\n"))
+    assert (lines[0].endswith("/a.swift\n") or
+            lines[0].endswith("/a.swiftmodule\n"))
+    assert (lines[1].endswith("/b.swift\n") or
+            lines[1].endswith("/b.swiftmodule\n"))
+    assert (lines[2].endswith("/c.swift\n") or
+            lines[2].endswith("/c.swiftmodule\n"))
 
 if primaryFile:
     print("Command-line primary", os.path.basename(primaryFile))
@@ -42,7 +42,7 @@ if '-primary-filelist' in sys.argv:
     primaryFilelistFile = sys.argv[sys.argv.index('-primary-filelist') + 1]
     with open(primaryFilelistFile, 'r') as f:
         lines = f.readlines()
-        assert(len(lines) == 1)
+        assert len(lines) == 1
         print("Handled", os.path.basename(lines[0]).rstrip())
 elif lines[0].endswith(".swiftmodule\n"):
     print("Handled modules")
@@ -63,7 +63,7 @@ if '-num-threads' in sys.argv:
     outputListFile = sys.argv[sys.argv.index('-output-filelist') + 1]
     with open(outputListFile, 'r') as f:
         lines = f.readlines()
-        assert(lines[0].endswith("/a.o\n") or lines[0].endswith("/a.bc\n"))
-        assert(lines[1].endswith("/b.o\n") or lines[1].endswith("/b.bc\n"))
-        assert(lines[2].endswith("/c.o\n") or lines[2].endswith("/c.bc\n"))
+        assert lines[0].endswith("/a.o\n") or lines[0].endswith("/a.bc\n")
+        assert lines[1].endswith("/b.o\n") or lines[1].endswith("/b.bc\n")
+        assert lines[2].endswith("/c.o\n") or lines[2].endswith("/c.bc\n")
     print("...with output!")

@@ -3,7 +3,7 @@
 import sys
 
 (_, old, new) = sys.argv
-assert(len(old) == len(new))
+assert len(old) == len(new)
 
 if sys.version_info[0] < 3:
     data = sys.stdin.read()

@@ -232,7 +232,7 @@ class Variant(Structure):
         elif var_ty == VariantType.UID:
             return UIdent(conf.lib.sourcekitd_variant_uid_get_value(self))
         else:
-            assert(var_ty == VariantType.BOOL)
+            assert var_ty == VariantType.BOOL
             return conf.lib.sourcekitd_variant_bool_get_value(self)
 
     def to_python_array(self):

@@ -339,7 +339,7 @@ class UnicodeTrieGenerator(object):
         for cp in range(0, 0x110000):
             expected_value = unicode_property.get_value(cp)
             actual_value = self.get_value(cp)
-            assert(expected_value == actual_value)
+            assert expected_value == actual_value
 
     def freeze(self):
         """Compress internal trie representation.
@@ -405,12 +405,12 @@ class UnicodeTrieGenerator(object):
 
     def _int_to_le_bytes(self, data, width):
         if width == 1:
-            assert(data & ~0xff == 0)
+            assert data & ~0xff == 0
             return [data]
         if width == 2:
-            assert(data & ~0xffff == 0)
+            assert data & ~0xffff == 0
             return [data & 0xff, data & 0xff00]
-        assert(False)
+        assert False
 
     def _int_list_to_le_bytes(self, ints, width):
         return [
@@ -512,7 +512,7 @@ def get_extended_grapheme_cluster_rules_matrix(grapheme_cluster_break_table):
                 rules_matrix[first][second] = action
 
     # Make sure we can pack one row of the matrix into a 'uint16_t'.
-    assert(len(any_value) <= 16)
+    assert len(any_value) <= 16
 
     result = []
     for first in any_value:
@@ -572,9 +572,9 @@ def get_grapheme_cluster_break_tests_as_utf8(grapheme_break_test_file_name):
         return (test, boundaries)
 
     # Self-test.
-    assert(_convert_line(u'÷ 0903 × 0308 ÷ AC01 ÷ # abc') == (
-        '\\xe0\\xa4\\x83\\xcc\\x88\\xea\\xb0\\x81', [0, 5, 8]))
-    assert(_convert_line(u'÷ D800 ÷ # abc') == ('\\xe2\\x80\\x8b', [0, 3]))
+    assert (_convert_line(u'÷ 0903 × 0308 ÷ AC01 ÷ # abc') ==
+            ('\\xe0\\xa4\\x83\\xcc\\x88\\xea\\xb0\\x81', [0, 5, 8]))
+    assert _convert_line(u'÷ D800 ÷ # abc') == ('\\xe2\\x80\\x8b', [0, 3])
 
     result = []
 
@@ -627,9 +627,9 @@ def get_grapheme_cluster_break_tests_as_unicode_scalars(
         return (test, boundaries)
 
     # Self-test.
-    assert(_convert_line('÷ 0903 × 0308 ÷ AC01 ÷ # abc') == ([
-        0x0903, 0x0308, 0xac01], [0, 2, 3]))
-    assert(_convert_line('÷ D800 ÷ # abc') == ([0x200b], [0, 1]))
+    assert (_convert_line('÷ 0903 × 0308 ÷ AC01 ÷ # abc') ==
+            ([0x0903, 0x0308, 0xac01], [0, 2, 3]))
+    assert _convert_line('÷ D800 ÷ # abc') == ([0x200b], [0, 1])
 
     result = []
 

@@ -75,7 +75,7 @@ def main():
         # First see if we found the start of our stack trace start. If so, set
         # the found stack trace flag and continue.
         if line == 'Current stack trace:':
-            assert(not found_stack_trace_start)
+            assert not found_stack_trace_start
             found_stack_trace_start = True
             continue
 
@@ -97,11 +97,11 @@ def main():
 
         # Check for unavailable symbols, if that was requested.
        if args.check_unavailable:
-            assert('unavailable' not in matches.group('routine'))
+            assert 'unavailable' not in matches.group('routine')
 
     # Once we have processed all of the lines, make sure that we found at least
     # one stack trace entry.
-    assert(found_stack_trace_entry)
+    assert found_stack_trace_entry
 
 
 if __name__ == '__main__':

@@ -92,7 +92,7 @@ class ListReducer(object):
             self._reset_progress()
             return False
 
-        assert(result == TESTRESULT_NOFAILURE)
+        assert result == TESTRESULT_NOFAILURE
         # The property does not hold. Some of the elements we removed must
         # be necessary to maintain the property.
         self.mid_top = mid
@@ -165,7 +165,7 @@ class ListReducer(object):
     def reduce_list(self):
         random.seed(0x6e5ea738)  # Seed the random number generator
         (result, self.target_list, kept) = self.run_test(self.target_list, [])
-        assert(result in TESTRESULTS)
+        assert result in TESTRESULTS
         (should_continue, result) = self._should_continue(result)
         if not should_continue:
             return result

@@ -157,8 +157,8 @@ class SILOptInvoker(SILConstantInputToolInvoker):
         return self.tools.sil_opt
 
     def _cmdline(self, input_file, passes, emit_sib, output_file='-'):
-        assert(isinstance(emit_sib, bool))
-        assert(isinstance(output_file, str))
+        assert isinstance(emit_sib, bool)
+        assert isinstance(output_file, str)
         base_args = self.base_args(emit_sib)
         sanity_check_file_exists(input_file)
         base_args.extend([input_file, '-o', output_file])
@@ -189,12 +189,12 @@ class SILFuncExtractorInvoker(SILConstantInputToolInvoker):
 
     def _cmdline(self, input_file, funclist_path, emit_sib, output_file='-',
                  invert=False):
-        assert(isinstance(emit_sib, bool))
-        assert(isinstance(output_file, str))
+        assert isinstance(emit_sib, bool)
+        assert isinstance(output_file, str)
 
         sanity_check_file_exists(input_file)
         sanity_check_file_exists(funclist_path)
-        assert(isinstance(funclist_path, str))
+        assert isinstance(funclist_path, str)
         base_args = self.base_args(emit_sib)
         base_args.extend([input_file, '-o', output_file,
                           '-func-file=%s' % funclist_path])
@@ -204,7 +204,7 @@ class SILFuncExtractorInvoker(SILConstantInputToolInvoker):
 
     def _invoke(self, input_file, funclist_path, output_filename,
                 invert=False):
-        assert(isinstance(funclist_path, str))
+        assert isinstance(funclist_path, str)
         cmdline = self._cmdline(input_file,
                                 funclist_path,
                                 True,
@@ -214,7 +214,7 @@ class SILFuncExtractorInvoker(SILConstantInputToolInvoker):
 
     def invoke_with_functions(self, funclist_path, output_filename,
                               invert=False):
-        assert(isinstance(funclist_path, str))
+        assert isinstance(funclist_path, str)
         return self._invoke(self.input_file, funclist_path, output_filename,
                             invert)
 
@@ -236,5 +236,5 @@ class SILNMInvoker(SILToolInvoker):
         output = subprocess.check_output(cmdline)
         for line in output.split("\n")[:-1]:
             t = tuple(line.split(" "))
-            assert(len(t) == 2)
+            assert len(t) == 2
             yield t

@@ -47,7 +47,7 @@ class TestCache(unittest.TestCase):
         def func():
             return None
 
-        assert(mock_lru_cache.called)
+        assert mock_lru_cache.called
 
     def test_call_with_no_args(self):
         # Increments the counter once per unique call.

@@ -115,7 +115,7 @@ class TestHelpers(unittest.TestCase):
 
         mock_stream.write.assert_called_with(
             '>>> {}\n'.format(shell.quote(test_command)))
-        assert(mock_stream.flush.called)
+        assert mock_stream.flush.called
 
     @utils.requires_module('unittest.mock')
     def test_echo_command_custom_prefix(self):
@@ -124,7 +124,7 @@ class TestHelpers(unittest.TestCase):
         shell._echo_command('ls', mock_stream, prefix='$ ')
 
         mock_stream.write.assert_called_with('$ ls\n')
-        assert(mock_stream.flush.called)
+        assert mock_stream.flush.called
 
     # -------------------------------------------------------------------------
     # _normalize_args

@@ -73,7 +73,7 @@ def main():
     ax.set_xlim(0.0, 1.0)
     y_min = args.y_axis_min or 1.0 - y_limit
     y_max = args.y_axis_max or 1.0 + y_limit
-    assert(y_min <= y_max)
+    assert y_min <= y_max
     ax.set_ylim(y_min, y_max)
     ax.grid(True)
     ax.xaxis.set_ticks(np.arange(0.0, 1.0, 0.05))

@@ -59,7 +59,7 @@ class Node(object):
         self.omit_when_empty = omit_when_empty
         self.collection_element = element or ""
         # For SyntaxCollections make sure that the element_name is set.
-        assert(not self.is_syntax_collection() or element_name or element)
+        assert not self.is_syntax_collection() or element_name or element
         # If there's a preferred name for the collection element that differs
         # from its supertype, use that.
         self.collection_element_name = element_name or self.collection_element

@@ -66,17 +66,17 @@ class JobStats(JobData):
 
     def driver_jobs_ran(self):
         """Return the count of a driver job's ran sub-jobs"""
-        assert(self.is_driver_job())
+        assert self.is_driver_job()
         return self.stats.get("Driver.NumDriverJobsRun", 0)
 
     def driver_jobs_skipped(self):
         """Return the count of a driver job's skipped sub-jobs"""
-        assert(self.is_driver_job())
+        assert self.is_driver_job()
         return self.stats.get("Driver.NumDriverJobsSkipped", 0)
 
     def driver_jobs_total(self):
         """Return the total count of a driver job's ran + skipped sub-jobs"""
-        assert(self.is_driver_job())
+        assert self.is_driver_job()
         return self.driver_jobs_ran() + self.driver_jobs_skipped()
 
     def merged_with(self, other, merge_by="sum"):
@@ -126,7 +126,7 @@ class JobStats(JobData):
     def incrementality_percentage(self):
         """Assuming the job is a driver job, return the amount of
         jobs that actually ran, as a percentage of the total number."""
-        assert(self.is_driver_job())
+        assert self.is_driver_job()
         ran = self.driver_jobs_ran()
         total = self.driver_jobs_total()
         return round((float(ran) / float(total)) * 100.0, 2)

@@ -134,7 +134,7 @@ def map_line_to_source_file(target_filename, target_line_num):
     >>> t.close()
     >>> os.remove(t.name)
     """
-    assert(target_line_num > 0)
+    assert target_line_num > 0
     map = fline_map(target_filename)
     index = bisect.bisect_left(map, (target_line_num, '', 0))
     base = map[index - 1]
@@ -191,7 +191,7 @@ def map_line_from_source_file(source_filename, source_line_num,
     >>> t.close()
     >>> os.remove(t.name)
     """
-    assert(source_line_num > 0)
+    assert source_line_num > 0
     map = fline_map(target_filename)
 
     for i, (target_line_num, found_source_filename,

@@ -77,7 +77,7 @@ def vars_of_args(args):
 # of each of "old" and "new", and the stats are those found in the respective
 # dirs.
 def load_paired_stats_dirs(args):
-    assert(len(args.remainder) == 2)
+    assert len(args.remainder) == 2
     paired_stats = []
     (old, new) = args.remainder
     vargs = vars_of_args(args)

@@ -274,7 +274,7 @@ def converged(ctr, simplex, epsilon):
 def Nelder_Mead_simplex(objective, params, bounds, epsilon=1.0e-6):
     # By the book: https://en.wikipedia.org/wiki/Nelder%E2%80%93Mead_method
     ndim = len(params)
-    assert(ndim >= 2)
+    assert ndim >= 2
 
     def named(tup):
         return params.__new__(params.__class__, *tup)
@@ -332,7 +332,7 @@ def Nelder_Mead_simplex(objective, params, bounds, epsilon=1.0e-6):
             continue
 
         # 5. Contraction
-        assert(vr >= simplex[-2].val)
+        assert vr >= simplex[-2].val
         xc = tup_add(x0, tup_mul(rho, tup_sub(xw, x0)))
         vc = f(xc)
         if vc < vw:
@@ -362,7 +362,7 @@ def Nelder_Mead_simplex(objective, params, bounds, epsilon=1.0e-6):
 # perfectly") and finally returns (fit_params, r_squared).
 def fit_function_to_data_by_least_squares(objective, params, bounds, xs, ys):
 
-    assert(len(ys) > 0)
+    assert len(ys) > 0
     mean_y = sum(ys) / len(ys)
     ss_total = sum((y - mean_y) ** 2 for y in ys)
     data = list(zip(xs, ys))

@@ -52,7 +52,7 @@ def _get_po_ordered_nodes(root, invertedDepMap):
 
         # Then grab the dependents of our node.
         deps = invertedDepMap.get(node, set([]))
-        assert(isinstance(deps, set))
+        assert isinstance(deps, set)
 
         # Then visit those and see if we have not visited any of them. Push
         # any such nodes onto the worklist and continue. If we have already
@@ -92,13 +92,13 @@ class BuildDAG(object):
 
     def set_root(self, root):
         # Assert that we always only have one root.
-        assert(self.root is None)
+        assert self.root is None
         self.root = root
 
     def produce_schedule(self):
         # Grab the root and make sure it is not None
         root = self.root
-        assert(root is not None)
+        assert root is not None
 
         # Then perform a post order traversal from root using our inverted
         # dependency map to compute a list of our nodes in post order.

@@ -692,7 +692,7 @@ class BuildScriptInvocation(object):
             if is_impl:
                 self._execute_impl(pipeline, all_hosts, perform_epilogue_opts)
             else:
-                assert(index != last_impl_index)
+                assert index != last_impl_index
                 if index > last_impl_index:
                     non_darwin_cross_compile_hostnames = [
                         target for target in self.args.cross_compile_hosts if not

@@ -16,7 +16,7 @@ class StageArgs(object):
         self.__dict__['postfix'] = stage.postfix
         self.__dict__['stage'] = stage
         self.__dict__['args'] = args
-        assert(not isinstance(self.args, StageArgs))
+        assert not isinstance(self.args, StageArgs)
 
     def _get_stage_prefix(self):
         return self.__dict__['postfix']

@@ -24,7 +24,7 @@ class ProductPipeline(object):
     This class is meant to just be state.
     """
     def __init__(self, should_run_epilogue_operations, identity, is_impl):
-        assert(isinstance(identity, int))
+        assert isinstance(identity, int)
         self.identity = identity
         self.products = []
         self.is_impl = is_impl
@@ -95,16 +95,16 @@ class ProductPipelineListBuilder(object):
 
     def add_product(self, product_cls, is_enabled):
         """Add a non-impl product to the current pipeline begin constructed"""
-        assert(self.current_pipeline is not None)
-        assert(not self.is_current_pipeline_impl)
-        assert(not product_cls.is_build_script_impl_product())
+        assert self.current_pipeline is not None
+        assert not self.is_current_pipeline_impl
+        assert not product_cls.is_build_script_impl_product()
         self.current_pipeline.append(product_cls, is_enabled)
 
     def add_impl_product(self, product_cls, is_enabled):
         """Add a non-impl product to the current pipeline begin constructed"""
-        assert(self.current_pipeline is not None)
-        assert(self.is_current_pipeline_impl)
-        assert(product_cls.is_build_script_impl_product())
+        assert self.current_pipeline is not None
+        assert self.is_current_pipeline_impl
+        assert product_cls.is_build_script_impl_product()
         self.current_pipeline.append(product_cls, is_enabled)
 
     def infer(self):
@@ -120,7 +120,7 @@ class ProductPipelineListBuilder(object):
         for pipeline_i in range(len(pipeline)):
             (p, is_enabled) = pipeline[pipeline_i]
             # Make sure p has not been added multiple times to the builder.
-            assert(p not in products_to_generation_index)
+            assert p not in products_to_generation_index
             products_to_generation_index[p] = (i, pipeline_i)
             if is_enabled:
                 final_pipeline.append(p)
@@ -133,8 +133,8 @@ class ProductPipelineListBuilder(object):
         # our product are from our generation or earlier. If we find such a
         # dependency error.
         for (p, is_enabled) in pipeline:
-            assert(all(d in products_to_generation_index for d in
-                   p.get_dependencies()))
+            assert (all(d in products_to_generation_index for d in
+                    p.get_dependencies()))
 
         for i in range(len(inferred_pipeline_list)):
             pipeline = inferred_pipeline_list[i]
@@ -166,7 +166,7 @@ class ProductPipelineListBuilder(object):
             (gen_offset, index) = products_to_generation_index[p]
             # If we are from an earlier generation, our position in the
             # inferred pipeline list may be None. Initialize it now.
-            assert(gen_offset <= i)
+            assert gen_offset <= i
             inferred_pipeline_list[gen_offset][index] = p
 
         filtered_results = []

@@ -213,7 +213,7 @@ def run(*args, **kwargs):
     prefix = kwargs.pop('prefix', '')
     if dry_run:
         _echo_command(dry_run, *args, env=env, prompt="{0}+ ".format(prefix))
-        return(None, 0, args)
+        return (None, 0, args)
 
     my_pipe = subprocess.Popen(
         *args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
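The last hunk applies the same rule to 'return': unlike 'assert', parentheses around a return value are semantically inert, so that change is purely cosmetic, but E275 still flags the missing space after the keyword. A small sketch (the function and values below are made up, not from the commit):

    def run_dry(args):
        # Flagged by E275: no whitespace after the 'return' keyword.
        return(None, 0, args)

    def run_dry_fixed(args):
        # The commit's spelling; the returned tuple is identical.
        return (None, 0, args)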