Mirror of https://github.com/rizsotto/scan-build.git (synced 2025-12-16 12:00:08 +01:00)
add more typing
@@ -34,6 +34,10 @@ from libscanbuild.compilation import Compilation, classify_source, \
    CompilationDatabase
from libscanbuild.clang import get_version, get_arguments

from typing import Any, Dict, List, Callable, Iterable, Generator # noqa: ignore=F401
from libscanbuild import Execution # noqa: ignore=F401
import argparse # noqa: ignore=F401

__all__ = ['scan_build', 'analyze_build', 'analyze_compiler_wrapper']

COMPILER_WRAPPER_CC = 'analyze-cc'
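The annotations added in this commit are PEP 484 comment-style types: they keep the modules importable on Python 2, while a type checker running in Python 2 mode can still verify them. The `# noqa: ignore=F401` markers silence flake8's unused-import warning for names that only appear inside type comments. A minimal standalone sketch of the idiom (the function bodies below are invented, not the project's code):

```python
from typing import Any, Dict  # noqa: ignore=F401


def scan_build():
    # type: () -> int
    """ Hypothetical entry point returning an exit code. """
    return 0


def analyze_parameters(args):
    # type: (Dict[str, Any]) -> Dict[str, Any]
    """ The comment form keeps the file valid Python 2 syntax, while a
    checker still sees the declared parameter and return types. """
    return {'verbose': args.get('verbose', 0)}


print(scan_build(), analyze_parameters({'verbose': 2}))
```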
@@ -43,6 +47,7 @@ ENVIRONMENT_KEY = 'ANALYZE_BUILD'

@command_entry_point
def scan_build():
    # type: () -> int
    """ Entry point for scan-build command. """

    args = parse_args_for_scan_build()

@@ -70,6 +75,7 @@ def scan_build():

@command_entry_point
def analyze_build():
    # type: () -> int
    """ Entry point for analyze-build command. """

    args = parse_args_for_analyze_build()

@@ -85,6 +91,7 @@ def analyze_build():


def need_analyzer(args):
    # type: (str) -> bool
    """ Check the intent of the build command.

    When static analyzer run against project configure step, it should be

@@ -94,16 +101,18 @@ def need_analyzer(args):
    when compiler wrappers are used. That's the moment when build setup
    check the compiler and capture the location for the build process. """

    return len(args) and not re.search('configure|autogen', args[0])
    return len(args) > 0 and not re.search('configure|autogen', args[0])


def analyze_parameters(args):
    # type: (argparse.Namespace) -> Dict[str, Any]
    """ Mapping between the command line parameters and the analyzer run
    method. The run method works with a plain dictionary, while the command
    line parameters are in a named tuple.
    The keys are very similar, and some values are preprocessed. """

    def prefix_with(constant, pieces):
        # type: (Any, List[Any]) -> List[Any]
        """ From a sequence create another sequence where every second element
        is from the original sequence and the odd elements are the prefix.
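The change from `len(args)` to `len(args) > 0` matters once the function carries a `bool` annotation: `len(args) and not re.search(...)` evaluates to the integer `0` for an empty argument list, while the explicit comparison keeps the result a genuine boolean. A small runnable sketch (the argument lists below are made up):

```python
import re
from typing import List  # noqa: ignore=F401


def need_analyzer(args):
    # type: (List[str]) -> bool
    # `len(args) and ...` would yield the int 0 for an empty list;
    # `len(args) > 0` keeps the declared bool return type accurate.
    return len(args) > 0 and not re.search('configure|autogen', args[0])


print(need_analyzer([]))               # False
print(need_analyzer(['./configure']))  # False
print(need_analyzer(['make', 'all']))  # True
```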
@@ -112,6 +121,7 @@ def analyze_parameters(args):
        return [elem for piece in pieces for elem in [constant, piece]]

    def direct_args(args):
        # type: (argparse.Namespace) -> List[str]
        """ A group of command line arguments can mapped to command
        line arguments of the analyzer. """
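`prefix_with` interleaves a constant flag in front of every element, which is how repeated analyzer options are expanded onto the command line. A standalone sketch of its behaviour (the flag name below is only illustrative):

```python
def prefix_with(constant, pieces):
    # type: (object, list) -> list
    """ Interleave `constant` before every element of `pieces`. """
    return [elem for piece in pieces for elem in [constant, piece]]


# e.g. turning a list of checker names into repeated command line flags
print(prefix_with('-analyzer-checker', ['alpha.core', 'nullability']))
# ['-analyzer-checker', 'alpha.core', '-analyzer-checker', 'nullability']
```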
@@ -161,6 +171,7 @@ def analyze_parameters(args):


def run_analyzer_parallel(compilations, args):
    # type: (Iterable[Compilation], argparse.Namespace) -> None
    """ Runs the analyzer against the given compilations. """

    logging.debug('run analyzer against compilation database')

@@ -176,6 +187,7 @@ def run_analyzer_parallel(compilations, args):


def setup_environment(args):
    # type: (argparse.Namespace) -> Dict[str, str]
    """ Set up environment for build command to interpose compiler wrapper. """

    environment = dict(os.environ)

@@ -199,6 +211,7 @@ def setup_environment(args):
@command_entry_point
@wrapper_entry_point
def analyze_compiler_wrapper(result, execution):
    # type: (int, Execution) -> None
    """ Entry point for `analyze-cc` and `analyze-c++` compiler wrappers. """

    # don't run analyzer when compilation fails. or when it's not requested.

@@ -215,6 +228,7 @@ def analyze_compiler_wrapper(result, execution):

@contextlib.contextmanager
def report_directory(hint, keep):
    # type: (str, bool) -> Generator[str, None, None]
    """ Responsible for the report directory.

    hint -- could specify the parent directory of the output directory.
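A generator-based context manager is annotated as `Generator[YieldType, SendType, ReturnType]`, which is why `report_directory` gets `Generator[str, None, None]`: it yields the directory path once and returns nothing. A minimal sketch of the same shape (not the project's actual implementation):

```python
import contextlib
import os
import shutil
import tempfile
from typing import Generator  # noqa: ignore=F401


@contextlib.contextmanager
def report_directory(hint, keep):
    # type: (str, bool) -> Generator[str, None, None]
    """ Yield a scan-build-style output directory, remove it unless kept. """
    name = tempfile.mkdtemp(prefix='scan-build-', dir=hint)
    try:
        yield name
    finally:
        if not keep:
            shutil.rmtree(name)


with report_directory(os.getcwd(), keep=False) as out_dir:
    print('reports would go into', out_dir)
```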
@@ -278,6 +292,7 @@ def require(required):
                             # 'text' or 'plist-multi-file'
          'output_failures']) # generate crash reports or not
def run(opts):
    # type: (Dict[str, Any]) -> Dict[str, Any]
    """ Entry point to run (or not) static analyzer against a single entry
    of the compilation database.

@@ -296,6 +311,7 @@ def run(opts):


def logging_analyzer_output(opts):
    # type: (Dict[str, Any]) -> None
    """ Display error message from analyzer. """

    if opts and 'error_output' in opts:

@@ -306,6 +322,7 @@ def logging_analyzer_output(opts):
@require(['clang', 'directory', 'flags', 'source', 'output_dir', 'language',
          'error_output', 'exit_code'])
def report_failure(opts):
    # type: (Dict[str, Any]) -> None
    """ Create report when analyzer failed.

    The major report is the preprocessor output. The output filename generated

@@ -313,12 +330,14 @@ def report_failure(opts):
    And some more execution context also saved into '.info.txt' file. """

    def extension():
        # type: () -> str
        """ Generate preprocessor file extension. """

        mapping = {'objective-c++': '.mii', 'objective-c': '.mi', 'c++': '.ii'}
        return mapping.get(opts['language'], '.i')

    def destination():
        # type: () -> str
        """ Creates failures directory if not exits yet. """

        failures_dir = os.path.join(opts['output_dir'], 'failures')

@@ -332,10 +351,10 @@ def report_failure(opts):
    error = 'crash' if opts['exit_code'] < 0 else 'other_error'
    # Create preprocessor output file name. (This is blindly following the
    # Perl implementation.)
    (handle, name) = tempfile.mkstemp(suffix=extension(),
    (fd, name) = tempfile.mkstemp(suffix=extension(),
                                      prefix='clang_' + error + '_',
                                      dir=destination())
    os.close(handle)
    os.close(fd)
    # Execute Clang again, but run the syntax check only.
    try:
        cwd = opts['directory']
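The `handle` to `fd` rename reflects what `tempfile.mkstemp` actually returns: an OS-level file descriptor plus the path, and the descriptor should be closed before other tools write to the file by name. A small sketch of that pattern (the prefix and suffix below are only illustrative):

```python
import os
import tempfile

# mkstemp returns an open OS file descriptor and the file path; close the
# descriptor right away when the file will later be (re)written by name.
(fd, name) = tempfile.mkstemp(suffix='.i', prefix='clang_other_error_')
os.close(fd)
print('preprocessor output would be redirected to', name)
os.unlink(name)  # clean up the empty placeholder in this sketch
```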
@@ -362,6 +381,7 @@ def report_failure(opts):
@require(['clang', 'directory', 'flags', 'direct_args', 'source', 'output_dir',
          'output_format'])
def run_analyzer(opts, continuation=report_failure):
    # type: (...) -> Dict[str, Any]
    """ It assembles the analysis command line and executes it. Capture the
    output of the analysis and returns with it. If failure reports are
    requested, it calls the continuation to generate it. """

@@ -399,6 +419,7 @@ def run_analyzer(opts, continuation=report_failure):

@require(['flags', 'force_debug'])
def filter_debug_flags(opts, continuation=run_analyzer):
    # type: (...) -> Dict[str, Any]
    """ Filter out nondebug macros when requested. """

    if opts.pop('force_debug'):

@@ -410,6 +431,7 @@ def filter_debug_flags(opts, continuation=run_analyzer):

@require(['language', 'compiler', 'source', 'flags'])
def language_check(opts, continuation=filter_debug_flags):
    # type: (...) -> Dict[str, Any]
    """ Find out the language from command line parameters or file name
    extension. The decision also influenced by the compiler invocation. """

@@ -427,10 +449,10 @@ def language_check(opts, continuation=filter_debug_flags):

    if language is None:
        logging.debug('skip analysis, language not known')
        return None
        return dict()
    elif language not in accepted:
        logging.debug('skip analysis, language not supported')
        return None
        return dict()

    logging.debug('analysis, language: %s', language)
    opts.update({'language': language,
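Replacing `return None` with `return dict()` keeps every step of the decorator chain consistent with the declared `Dict[str, Any]` return type: a skipped analysis now yields an empty dictionary instead of an `Optional` value that callers would have to guard against. A condensed sketch of the idea (the chain below is simplified, not the project's real one):

```python
from typing import Any, Callable, Dict  # noqa: ignore=F401


def language_check(opts, continuation=lambda o: o):
    # type: (Dict[str, Any], Callable[[Dict[str, Any]], Dict[str, Any]]) -> Dict[str, Any]
    accepted = {'c', 'c++', 'objective-c', 'objective-c++'}
    language = opts.get('language')
    if language is None or language not in accepted:
        # an empty dict still satisfies Dict[str, Any], so callers
        # never need an `is None` check
        return dict()
    return continuation(opts)


print(language_check({'language': 'java'}))             # {}
print(language_check({'language': 'c', 'flags': []}))   # passed through
```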
@@ -440,6 +462,7 @@ def language_check(opts, continuation=filter_debug_flags):

@require(['arch_list', 'flags'])
def arch_check(opts, continuation=language_check):
    # type: (...) -> Dict[str, Any]
    """ Do run analyzer through one of the given architectures. """

    disabled = frozenset({'ppc', 'ppc64'})

@@ -459,7 +482,7 @@ def arch_check(opts, continuation=language_check):
            opts.update({'flags': ['-arch', current] + opts['flags']})
            return continuation(opts)
        logging.debug('skip analysis, found not supported arch')
        return None
        return dict()
    logging.debug('analysis, on default arch')
    return continuation(opts)

@@ -487,11 +510,12 @@ IGNORED_FLAGS = {
    '-sectorder': 3,
    '--param': 1,
    '--serialize-diagnostics': 1
}
} # type: Dict[str, int]


@require(['flags'])
def classify_parameters(opts, continuation=arch_check):
    # type: (...) -> Dict[str, Any]
    """ Prepare compiler flags (filters some and add others) and take out
    language (-x) and architecture (-arch) flags for future processing. """
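Besides function signatures, the commit annotates literals whose element types are not obvious to a checker by placing the type comment on the closing line of the assignment. A short sketch of the same pattern (the flag table below is abbreviated and illustrative only):

```python
from typing import Any, Dict  # noqa: ignore=F401

# number of extra arguments each ignored flag consumes (abbreviated example)
IGNORED_FLAGS = {
    '-o': 1,
    '--param': 1,
    '--serialize-diagnostics': 1
}  # type: Dict[str, int]

state = {
    'flags': [],       # the filtered compiler flags
    'arch_list': [],   # list of architecture flags
    'language': None,  # compilation language, None, if not specified
}  # type: Dict[str, Any]

print(IGNORED_FLAGS, state)
```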
@@ -500,7 +524,7 @@ def classify_parameters(opts, continuation=arch_check):
        'flags': [], # the filtered compiler flags
        'arch_list': [], # list of architecture flags
        'language': None, # compilation language, None, if not specified
    }
    } # type: Dict[str, Any]

    # iterate on the compile options
    args = iter(opts['flags'])

@@ -530,18 +554,20 @@ def classify_parameters(opts, continuation=arch_check):

@require(['source', 'excludes'])
def exclude(opts, continuation=classify_parameters):
    # type: (...) -> Dict[str, Any]
    """ Analysis might be skipped, when one of the requested excluded
    directory contains the file. """

    def contains(directory, entry):
        # type: (str, str) -> bool
        """ Check is directory contains the given file. """

        # When a directory contains a file, then the relative path to the
        # file from that directory does not start with a parent dir prefix.
        relative = os.path.relpath(entry, directory).split(os.sep)
        return len(relative) and relative[0] != os.pardir
        return len(relative) > 0 and relative[0] != os.pardir

    if any(contains(dir, opts['source']) for dir in opts['excludes']):
        logging.debug('skip analysis, file requested to exclude')
        return None
        return dict()
    return continuation(opts)
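The `contains` helper decides directory membership purely from path arithmetic: if the relative path from the directory to the file starts with `..`, the file lies outside it. A standalone sketch of that check (the paths below are made up):

```python
import os


def contains(directory, entry):
    # type: (str, str) -> bool
    """ True when `entry` lives somewhere below `directory`. """
    relative = os.path.relpath(entry, directory).split(os.sep)
    return len(relative) > 0 and relative[0] != os.pardir


print(contains('/project', '/project/src/main.c'))  # True
print(contains('/project/src', '/project/README'))  # False (starts with '..')
```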
@@ -18,10 +18,11 @@ import sys
import argparse
import logging
import tempfile
from typing import Tuple, Dict # noqa: ignore=F401
from libscanbuild import reconfigure_logging
from libscanbuild.clang import get_checkers

from typing import Tuple, Dict # noqa: ignore=F401

__all__ = ['parse_args_for_intercept_build', 'parse_args_for_analyze_build',
           'parse_args_for_scan_build']
@@ -9,9 +9,10 @@ Since Clang command line interface is so rich, but this project is using only
a subset of that, it makes sense to create a function specific wrapper. """

import re
from libscanbuild import shell_split, run_command

from typing import List, Set, FrozenSet, Callable # noqa: ignore=F401
from typing import Iterable, Tuple, Dict # noqa: ignore=F401
from libscanbuild import shell_split, run_command

__all__ = ['get_version', 'get_arguments', 'get_checkers']
@@ -10,9 +10,10 @@ import os
import collections
import logging
import json
from typing import List, Iterable, Dict, Tuple, Type, Any # noqa: ignore=F401
from libscanbuild import Execution, shell_split, run_command

from typing import List, Iterable, Dict, Tuple, Type, Any # noqa: ignore=F401

__all__ = ['classify_source', 'Compilation', 'CompilationDatabase']

# Map of ignored compiler option for the creation of a compilation database.
@@ -29,8 +29,6 @@ import re
import sys
import uuid
import subprocess
import argparse # noqa: ignore=F401
from typing import Iterable, Dict, Tuple, List # noqa: ignore=F401

from libear import build_libear, temporary_directory
from libscanbuild import command_entry_point, wrapper_entry_point, \

@@ -38,6 +36,9 @@ from libscanbuild import command_entry_point, wrapper_entry_point, \
from libscanbuild.arguments import parse_args_for_intercept_build
from libscanbuild.compilation import Compilation, CompilationDatabase

from typing import Iterable, Dict, Tuple, List # noqa: ignore=F401
import argparse # noqa: ignore=F401

__all__ = ['capture', 'intercept_build', 'intercept_compiler_wrapper']

COMPILER_WRAPPER_CC = 'intercept-cc'
@@ -19,12 +19,19 @@ import glob
import json
import logging
import datetime
import getpass
import socket

from libscanbuild.clang import get_version

from typing import Dict, List, Callable, Any, Set, Generator, Iterator # noqa: ignore=F401
import argparse # noqa: ignore=F401

__all__ = ['document']


def document(args):
    # type: (argparse.Namespace) -> int
    """ Generates cover report and returns the number of bugs/crashes. """

    html_reports_available = args.output_format in {'html', 'plist-html'}

@@ -62,11 +69,9 @@ def document(args):


def assemble_cover(args, prefix, fragments):
    # type: (argparse.Namespace, str, List[str]) -> None
    """ Put together the fragments into a final report. """

    import getpass
    import socket

    if args.html_title is None:
        args.html_title = os.path.basename(prefix) + ' - analyzer results'
@@ -161,6 +166,7 @@ def bug_summary(output_dir, bug_counter):


def bug_report(output_dir, prefix):
    # type: (str, str) -> str
    """ Creates a fragment from the analyzer reports. """

    pretty = prettify_bug(prefix, output_dir)

@@ -208,6 +214,7 @@ def bug_report(output_dir, prefix):


def crash_report(output_dir, prefix):
    # type: (str, str) -> str
    """ Creates a fragment from the compiler crashes. """

    pretty = prettify_crash(prefix, output_dir)
@@ -246,14 +253,15 @@ def crash_report(output_dir, prefix):


def read_crashes(output_dir):
    # type: (str) -> Iterator[Dict[str, Any]]
    """ Generate a unique sequence of crashes from given output directory. """

    return (parse_crash(filename)
            for filename in glob.iglob(os.path.join(output_dir, 'failures',
                                                    '*.info.txt')))
    pattern = os.path.join(output_dir, 'failures', '*.info.txt') # type: str
    return (parse_crash(filename) for filename in glob.iglob(pattern))


def read_bugs(output_dir, html):
    # type: (str, bool) -> Iterator[Dict[str, Any]]
    """ Generate a unique sequence of bugs from given output directory.

    Duplicates can be in a project if the same module was compiled multiple
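Pulling the glob pattern into a named, annotated variable keeps the generator expression on one line; `glob.iglob` itself is lazy, so crash report files are only parsed while the caller iterates. A small sketch (the directory layout and the `parse_crash` stand-in are made up):

```python
import glob
import os
from typing import Any, Dict, Iterator  # noqa: ignore=F401


def parse_crash(filename):
    # type: (str) -> Dict[str, Any]
    """ Stand-in for the real report parser. """
    return {'file': filename}


def read_crashes(output_dir):
    # type: (str) -> Iterator[Dict[str, Any]]
    pattern = os.path.join(output_dir, 'failures', '*.info.txt')  # type: str
    # glob.iglob is lazy: crash files are parsed only while iterating
    return (parse_crash(filename) for filename in glob.iglob(pattern))


for crash in read_crashes('/tmp/scan-build-output'):
    print(crash)
```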
@@ -261,7 +269,7 @@ def read_bugs(output_dir, html):
    the final report (cover) only once. """

    parser = parse_bug_html if html else parse_bug_plist
    pattern = '*.html' if html else '*.plist'
    pattern = '*.html' if html else '*.plist' # type: str

    duplicate = duplicate_check(
        lambda bug: '{bug_line}.{bug_path_length}:{bug_file}'.format(**bug))
@@ -275,6 +283,7 @@ def read_bugs(output_dir, html):


def parse_bug_plist(filename):
    # type: (str) -> Generator[Dict[str, Any], None, None]
    """ Returns the generator of bugs from a single .plist file. """

    content = plistlib.readPlist(filename)

@@ -295,6 +304,7 @@ def parse_bug_plist(filename):


def parse_bug_html(filename):
    # type: (str) -> Generator[Dict[str, Any], None, None]
    """ Parse out the bug information from HTML output. """

    patterns = [re.compile(r'<!-- BUGTYPE (?P<bug_type>.*) -->$'),

@@ -333,6 +343,7 @@ def parse_bug_html(filename):


def parse_crash(filename):
    # type: (str) -> Dict[str, Any]
    """ Parse out the crash information from the report file. """

    match = re.match(r'(.*)\.info\.txt', filename)
@@ -350,11 +361,13 @@ def parse_crash(filename):


def category_type_name(bug):
    # type: (Dict[str, Any]) -> str
    """ Create a new bug attribute from bug by category and type.

    The result will be used as CSS class selector in the final report. """

    def smash(key):
        # type: (str) -> str
        """ Make value ready to be HTML attribute value. """

        return bug.get(key, '').lower().replace(' ', '_').replace("'", '')

@@ -363,6 +376,7 @@ def category_type_name(bug):


def duplicate_check(hash_function):
    # type: (Callable[[Any], str]) -> Callable[[Dict[str, Any]], bool]
    """ Workaround to detect duplicate dictionary values.

    Python `dict` type has no `hash` method, which is required by the `set`
@@ -375,23 +389,25 @@ def duplicate_check(hash_function):
    This method is a factory method, which returns a predicate. """

    def predicate(entry):
        # type: (Dict[str, Any]) -> bool
        """ The predicate which calculates and stores the hash of the given
        entries. The entry type has to work with the given hash function.

        :param entry: the questioned entry,
        :return: true/false depends the hash value is already seen or not.
        """
        entry_hash = hash_function(entry)
        entry_hash = hash_function(entry) # type: str
        if entry_hash not in state:
            state.add(entry_hash)
            return False
        return True

    state = set()
    state = set() # type: Set[str]
    return predicate


def create_counters():
    # type () -> Callable[[Dict[str, Any]], None] FIXME
    """ Create counters for bug statistics.

    Two entries are maintained: 'total' is an integer, represents the
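`duplicate_check` works around the fact that dictionaries are unhashable: the caller supplies a function that reduces each entry to a string key, and the returned predicate remembers the keys it has seen in a closed-over set (hence the `Set[str]` comment on `state`). A runnable sketch of how the factory is meant to be used (the bug records and the key format are invented):

```python
from typing import Any, Callable, Dict, Set  # noqa: ignore=F401


def duplicate_check(hash_function):
    # type: (Callable[[Any], str]) -> Callable[[Dict[str, Any]], bool]
    def predicate(entry):
        # type: (Dict[str, Any]) -> bool
        entry_hash = hash_function(entry)  # type: str
        if entry_hash not in state:
            state.add(entry_hash)
            return False  # first time this key is seen
        return True       # duplicate

    state = set()  # type: Set[str]
    return predicate


is_duplicate = duplicate_check(lambda bug: '{bug_line}:{bug_file}'.format(**bug))
bugs = [{'bug_line': 12, 'bug_file': 'a.c'},
        {'bug_line': 12, 'bug_file': 'a.c'},
        {'bug_line': 40, 'bug_file': 'b.c'}]
print([bug for bug in bugs if not is_duplicate(bug)])  # keeps the two unique bugs
```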
@@ -401,6 +417,7 @@ def create_counters():
    and 'label'. """

    def predicate(bug):
        # type (Dict[str, Any]) -> None FIXME
        bug_category = bug['bug_category']
        bug_type = bug['bug_type']
        current_category = predicate.categories.get(bug_category, dict())

@@ -414,13 +431,15 @@ def create_counters():
        predicate.categories.update({bug_category: current_category})
        predicate.total += 1

    predicate.total = 0
    predicate.categories = dict()
    predicate.total = 0 # type: int
    predicate.categories = dict() # type: Dict[str, Any]
    return predicate


def prettify_bug(prefix, output_dir):
    # type: (str, str) -> Callable[[Dict[str, Any]], Dict[str, str]]
    def predicate(bug):
        # type: (Dict[str, Any]) -> Dict[str, str]
        """ Make safe this values to embed into HTML. """

        bug['bug_type_class'] = category_type_name(bug)
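`create_counters` stores its running totals as attributes on the returned function object, which is why its type comments carry a FIXME: PEP 484 has no clean way to describe attributes attached to a plain function. A small sketch of the pattern itself (simplified from the report module):

```python
def create_counters():
    """ Return a callable that counts bugs per category and in total. """
    def predicate(bug):
        category = bug['bug_category']
        predicate.categories[category] = predicate.categories.get(category, 0) + 1
        predicate.total += 1

    # counters live as attributes of the function object itself, which
    # static type checkers cannot easily describe (hence the FIXME)
    predicate.total = 0
    predicate.categories = dict()
    return predicate


counter = create_counters()
for bug in [{'bug_category': 'Logic error'},
            {'bug_category': 'Logic error'},
            {'bug_category': 'Memory error'}]:
    counter(bug)
print(counter.total, counter.categories)  # 3 {'Logic error': 2, 'Memory error': 1}
```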
@@ -435,7 +454,9 @@ def prettify_bug(prefix, output_dir):


def prettify_crash(prefix, output_dir):
    # type: (str, str) -> Callable[[Dict[str, str]], Dict[str, str]]
    def predicate(crash):
        # type: (Dict[str, str]) -> Dict[str, str]
        """ Make safe this values to embed into HTML. """

        encode_value(crash, 'source', lambda x: escape(chop(prefix, x)))

@@ -449,6 +470,7 @@ def prettify_crash(prefix, output_dir):


def copy_resource_files(output_dir):
    # type: (str) -> None
    """ Copy the javascript and css files to the report directory. """

    this_dir = os.path.dirname(os.path.realpath(__file__))

@@ -457,6 +479,7 @@ def copy_resource_files(output_dir):


def encode_value(container, key, encode):
    # type: (Dict[str, Any], str, Callable[[Any], Any]) -> None
    """ Run 'encode' on 'container[key]' value and update it. """

    if key in container:
@@ -465,12 +488,14 @@ def encode_value(container, key, encode):


def chop(prefix, filename):
    # type: (str, str) -> str
    """ Create 'filename' from '/prefix/filename' """

    return filename if not prefix else os.path.relpath(filename, prefix)


def escape(text):
    # type: (str) -> str
    """ Paranoid HTML escape method. (Python version independent) """

    escape_table = {

@@ -484,6 +509,7 @@ def escape(text):


def reindent(text, indent):
    # type: (str, int) -> str
    """ Utility function to format html output and keep indentation. """

    result = ''

@@ -494,6 +520,7 @@ def reindent(text, indent):


def comment(name, opts=None):
    # type: (str, Dict[str, str]) -> str
    """ Utility function to format meta information as comment. """

    attributes = ''

@@ -505,6 +532,7 @@ def comment(name, opts=None):


def commonprefix_from(filename):
    # type: (str) -> str
    """ Create file prefix from a compilation database entries. """

    with open(filename, 'r') as handle:

@@ -512,6 +540,7 @@ def commonprefix_from(filename):


def commonprefix(files):
    # type: (Iterator[str]) -> str
    """ Fixed version of os.path.commonprefix.

    :param files: list of file names.
@@ -239,7 +239,7 @@ class AnalyzerTest(unittest.TestCase):
            'source': 'test.java',
            'language': 'java'
        }
        self.assertIsNone(sut.language_check(input, spy.call))
        self.assertEquals(dict(), sut.language_check(input, spy.call))
        self.assertIsNone(spy.arg)

    def test_set_language_sets_flags(self):

@@ -283,7 +283,7 @@ class AnalyzerTest(unittest.TestCase):
        def stop(archs):
            spy = Spy()
            input = {'flags': [], 'arch_list': archs}
            self.assertIsNone(sut.arch_check(input, spy.call))
            self.assertEqual(dict(), sut.arch_check(input, spy.call))
            self.assertIsNone(spy.arg)

        stop(['ppc'])
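The unit tests track the behavioural change: a skipped analysis step is now compared against an empty dict instead of `None`, while the spy still confirms the continuation was never invoked. A minimal sketch of that kind of assertion, assuming the simplified `language_check` behaviour shown earlier (the test class and helper below are invented, not the project's `Spy`):

```python
import unittest


def language_check(opts, continuation):
    # simplified stand-in mirroring the new behaviour: unsupported
    # languages short-circuit with an empty dict
    if opts.get('language') not in {'c', 'c++', 'objective-c', 'objective-c++'}:
        return dict()
    return continuation(opts)


class LanguageCheckTest(unittest.TestCase):
    def test_java_is_not_supported(self):
        calls = []
        self.assertEqual(dict(),
                         language_check({'language': 'java'}, calls.append))
        self.assertEqual([], calls)  # the continuation was never invoked


if __name__ == '__main__':
    unittest.main()
```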