Commit 9115ab1c authored by Yu Yang and committed by GitHub

Merge pull request #450 from reyoung/feature/pre-commit-hooks-scripts

Feature/pre commit hooks scripts
......@@ -13,8 +13,6 @@
# The document of clang-format is
# http://clang.llvm.org/docs/ClangFormat.html
# http://clang.llvm.org/docs/ClangFormatStyleOptions.html
#
# TODO(yuyang18): Add python and other language code style
---
Language: Cpp
BasedOnStyle: Google
......@@ -22,8 +20,9 @@ IndentWidth: 2
TabWidth: 2
ContinuationIndentWidth: 4
AccessModifierOffset: -2 # public/protected/private are not indented inside a class
PointerAlignment: Left # int* p/int& p, not int *p/int &p
Standard: Cpp11
AllowAllParametersOfDeclarationOnNextLine: true
BinPackParameters: false
BinPackArguments: false
...
- repo: https://github.com/Lucas-C/pre-commit-hooks.git
sha: c25201a00e6b0514370501050cf2a8538ac12270
hooks:
- id: remove-crlf
- repo: https://github.com/reyoung/mirrors-yapf.git
sha: v0.13.2
hooks:
- id: yapf
- repo: https://github.com/pre-commit/pre-commit-hooks
sha: 4ef03c4223ad322c7adaa6c6c0efb26b57df3b71
hooks:
- id: check-added-large-files
- id: check-merge-conflict
- id: check-symlinks
- id: detect-private-key
- id: end-of-file-fixer
# TODO(yuyang): the trailing-whitespace hook currently has bugs on markdown
# files, so do not add it to the pre-commit hooks for now
# - id: trailing-whitespace
#
# TODO(yuyang): debug-statements does not fit Paddle, because
# not all of our Python code is runnable; some of it is used only
# for documentation
# - id: debug-statements
This folder contains the scripts used in the PaddlePaddle introduction demo.
- Use `bash train.sh` to train a simple linear regression model.
- Use `python evaluate_model.py` to read the trained model parameters; you should see that `w` and `b` are very close to [2, 0.3]. A sketch of such a script is shown below.
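For reference, here is a minimal sketch of what such an evaluation script can look like. It assumes the trained parameters are written by `train.sh` as raw float32 binaries with a small fixed-size header under `output/pass-00029/`; the exact path, header size, and file names are assumptions for illustration, not guaranteed by this PR.

```python
import numpy as np


def load_parameter(file_name):
    # Assumption: each parameter file holds a small fixed-size header
    # (taken here as 16 bytes) followed by raw float32 values.
    with open(file_name, 'rb') as f:
        f.read(16)
        return np.fromfile(f, dtype=np.float32)


def main():
    w = load_parameter('output/pass-00029/w')
    b = load_parameter('output/pass-00029/b')
    # For the introduction demo, w should be close to 2 and b close to 0.3.
    print('w=%.6f, b=%.6f' % (w[0], b[0]))


if __name__ == '__main__':
    main()
```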
......@@ -19,4 +19,3 @@ done
cd $DIR
rm -f *.list
python generate_list.py
......@@ -14,4 +14,3 @@
"fields": ["id", "title", "genres"]
}
}
......@@ -37,4 +37,3 @@ paddle train \
--use_gpu=false \
--config_args=is_test=1 \
2>&1 | tee 'test.log'
......@@ -24,4 +24,3 @@ paddle train \
--show_parameter_stats_period=10 \
--test_all_data_in_one_period=1 \
2>&1 | tee 'train.log'
......@@ -98,4 +98,3 @@ There, you have recovered the underlying pattern between `X` and `Y` only from o
- <a href="../build/index.html"> Build and Installation </a>
- <a href="../demo/quick_start/index_en.html">Quick Start</a>
- <a href="../demo/index.html">Example and Demo</a>
......@@ -17,5 +17,3 @@ endif()
if(WITH_SWIG_PY)
add_subdirectory(api)
endif()
......@@ -65,4 +65,3 @@ struct ArgumentsPrivate {
return *(std::shared_ptr<T>*)(rawPtr);
}
};
......@@ -69,8 +69,8 @@ class TestMatrix(unittest.TestCase):
def test_numpy(self):
numpy_mat = np.matrix([[1, 2], [3, 4], [5, 6]], dtype="float32")
m = swig_paddle.Matrix.createCpuDenseFromNumpy(numpy_mat)
self.assertEqual(
(int(m.getHeight()), int(m.getWidth())), numpy_mat.shape)
self.assertEqual((int(m.getHeight()), int(m.getWidth())),
numpy_mat.shape)
# the numpy matrix and the paddle matrix share the same memory.
numpy_mat[0, 1] = 342.23
......@@ -254,4 +254,3 @@ extern __thread cudaStream_t default_stream;
#endif /* __NVCC__ */
#endif /* HL_BASE_H_ */
......@@ -199,4 +199,3 @@ inline void hl_batch_norm_backward(hl_tensor_descriptor inputDesc,
real *savedInvVar) {}
#endif // HL_CUDA_CUDNN_STUB_H_
......@@ -718,4 +718,3 @@ void sincos256_ps(v8sf x, v8sf *s, v8sf *c) {
*s = _mm256_xor_ps(xmm1, sign_bit_sin);
*c = _mm256_xor_ps(xmm2, sign_bit_cos);
}
......@@ -48,4 +48,3 @@ public:
};
} // namespace paddle
......@@ -80,4 +80,3 @@ void vTanh(const int n, const T* a, T* r);
} // namespace paddle
#endif // MATHFUNCTIONS_H_
......@@ -27,7 +27,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Does google-lint on c++ files.
The goal of this script is to identify places in the code that *may*
......@@ -55,7 +54,6 @@ import string
import sys
import unicodedata
_USAGE = """
Syntax: cpplint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...]
[--counting=total|toplevel|detailed] [--root=subdir]
......@@ -242,13 +240,11 @@ _ERROR_CATEGORIES = [
'whitespace/semicolon',
'whitespace/tab',
'whitespace/todo',
]
]
# These error categories are no longer enforced by cpplint, but for backwards-
# compatibility they may still appear in NOLINT comments.
_LEGACY_ERROR_CATEGORIES = [
'readability/streams',
]
_LEGACY_ERROR_CATEGORIES = ['readability/streams', ]
# The default state of the category filter. This is overridden by the --filter=
# flag. By default all errors are on, so only add here categories that should be
......@@ -394,8 +390,7 @@ _CPP_HEADERS = frozenset([
'cuchar',
'cwchar',
'cwctype',
])
])
# These headers are excluded from [build/include] and [build/include_order]
# checks:
......@@ -405,23 +400,26 @@ _CPP_HEADERS = frozenset([
_THIRD_PARTY_HEADERS_PATTERN = re.compile(
r'^(?:[^/]*[A-Z][^/]*\.h|lua\.h|lauxlib\.h|lualib\.h)$')
# Assertion macros. These are defined in base/logging.h and
# testing/base/gunit.h. Note that the _M versions need to come first
# for substring matching to work.
_CHECK_MACROS = [
'DCHECK', 'CHECK',
'EXPECT_TRUE_M', 'EXPECT_TRUE',
'ASSERT_TRUE_M', 'ASSERT_TRUE',
'EXPECT_FALSE_M', 'EXPECT_FALSE',
'ASSERT_FALSE_M', 'ASSERT_FALSE',
]
'DCHECK',
'CHECK',
'EXPECT_TRUE_M',
'EXPECT_TRUE',
'ASSERT_TRUE_M',
'ASSERT_TRUE',
'EXPECT_FALSE_M',
'EXPECT_FALSE',
'ASSERT_FALSE_M',
'ASSERT_FALSE',
]
# Replacement macros for CHECK/DCHECK/EXPECT_TRUE/EXPECT_FALSE
_CHECK_REPLACEMENT = dict([(m, {}) for m in _CHECK_MACROS])
for op, replacement in [('==', 'EQ'), ('!=', 'NE'),
('>=', 'GE'), ('>', 'GT'),
for op, replacement in [('==', 'EQ'), ('!=', 'NE'), ('>=', 'GE'), ('>', 'GT'),
('<=', 'LE'), ('<', 'LT')]:
_CHECK_REPLACEMENT['DCHECK'][op] = 'DCHECK_%s' % replacement
_CHECK_REPLACEMENT['CHECK'][op] = 'CHECK_%s' % replacement
......@@ -430,9 +428,8 @@ for op, replacement in [('==', 'EQ'), ('!=', 'NE'),
_CHECK_REPLACEMENT['EXPECT_TRUE_M'][op] = 'EXPECT_%s_M' % replacement
_CHECK_REPLACEMENT['ASSERT_TRUE_M'][op] = 'ASSERT_%s_M' % replacement
for op, inv_replacement in [('==', 'NE'), ('!=', 'EQ'),
('>=', 'LT'), ('>', 'LE'),
('<=', 'GT'), ('<', 'GE')]:
for op, inv_replacement in [('==', 'NE'), ('!=', 'EQ'), ('>=', 'LT'),
('>', 'LE'), ('<=', 'GT'), ('<', 'GE')]:
_CHECK_REPLACEMENT['EXPECT_FALSE'][op] = 'EXPECT_%s' % inv_replacement
_CHECK_REPLACEMENT['ASSERT_FALSE'][op] = 'ASSERT_%s' % inv_replacement
_CHECK_REPLACEMENT['EXPECT_FALSE_M'][op] = 'EXPECT_%s_M' % inv_replacement
......@@ -455,16 +452,15 @@ _ALT_TOKEN_REPLACEMENT = {
'xor_eq': '^=',
'not': '!',
'not_eq': '!='
}
}
# Compile regular expression that matches all the above keywords. The "[ =()]"
# bit is meant to avoid matching these keywords outside of boolean expressions.
#
# False positives include C-style multi-line comments and multi-line strings
# but those have always been troublesome for cpplint.
_ALT_TOKEN_REPLACEMENT_PATTERN = re.compile(
r'[ =()](' + ('|'.join(_ALT_TOKEN_REPLACEMENT.keys())) + r')(?=[ (]|$)')
_ALT_TOKEN_REPLACEMENT_PATTERN = re.compile(r'[ =()](' + ('|'.join(
_ALT_TOKEN_REPLACEMENT.keys())) + r')(?=[ (]|$)')
# These constants define types of headers for use with
# _IncludeState.CheckNextIncludeOrder().
......@@ -485,7 +481,6 @@ _MATCH_ASM = re.compile(r'^\s*(?:asm|_asm|__asm|__asm__)'
r'(?:\s+(volatile|__volatile__))?'
r'\s*[{(]')
_regexp_compile_cache = {}
# {str, set(int)}: a map from error categories to sets of linenumbers
......@@ -504,6 +499,7 @@ _line_length = 80
# This is set by --extensions flag.
_valid_extensions = set(['cc', 'h', 'cpp', 'cu', 'cuh'])
def ParseNolintSuppressions(filename, raw_line, linenum, error):
"""Updates the global list of error-suppressions.
......@@ -521,9 +517,9 @@ def ParseNolintSuppressions(filename, raw_line, linenum, error):
if matched:
if matched.group(1):
lines = matched.group(2)
if lines :
lines=int(lines[2:])
suppressed_line = [ linenum + i for i in xrange(lines) ]
if lines:
lines = int(lines[2:])
suppressed_line = [linenum + i for i in xrange(lines)]
else:
suppressed_line = linenum + 1
else:
......@@ -540,10 +536,12 @@ def ParseNolintSuppressions(filename, raw_line, linenum, error):
category = category[1:-1]
if category in _ERROR_CATEGORIES:
if isinstance(suppressed_line, int):
_error_suppressions.setdefault(category, set()).add(suppressed_line)
_error_suppressions.setdefault(
category, set()).add(suppressed_line)
else:
for _line in suppressed_line:
_error_suppressions.setdefault(category, set()).add(_line)
_error_suppressions.setdefault(category,
set()).add(_line)
elif category not in _LEGACY_ERROR_CATEGORIES:
error(filename, linenum, 'readability/nolint', 5,
'Unknown NOLINT error category: %s' % category)
......@@ -730,9 +728,8 @@ class _IncludeState(object):
error message describing what's wrong.
"""
error_message = ('Found %s after %s' %
(self._TYPE_NAMES[header_type],
self._SECTION_NAMES[self._section]))
error_message = ('Found %s after %s' % (
self._TYPE_NAMES[header_type], self._SECTION_NAMES[self._section]))
last_section = self._section
......@@ -828,7 +825,8 @@ class _CppLintState(object):
self.filters.append(clean_filt)
for filt in self.filters:
if not (filt.startswith('+') or filt.startswith('-')):
raise ValueError('Every filter in --filters must start with + or -'
raise ValueError(
'Every filter in --filters must start with + or -'
' (%s does not)' % filt)
def BackupFilters(self):
......@@ -861,6 +859,7 @@ class _CppLintState(object):
(category, count))
sys.stdout.write('Total errors found: %d\n' % self.error_count)
_cpplint_state = _CppLintState()
......@@ -906,6 +905,7 @@ def _SetFilters(filters):
"""
_cpplint_state.SetFilters(filters)
def _AddFilters(filters):
"""Adds more filter overrides.
......@@ -918,14 +918,17 @@ def _AddFilters(filters):
"""
_cpplint_state.AddFilters(filters)
def _BackupFilters():
""" Saves the current filter list to backup storage."""
_cpplint_state.BackupFilters()
def _RestoreFilters():
""" Restores filters previously backed up."""
_cpplint_state.RestoreFilters()
class _FunctionState(object):
"""Tracks current function name and the number of lines in its body."""
......@@ -967,7 +970,8 @@ class _FunctionState(object):
trigger = base_trigger * 2**_VerboseLevel()
if self.lines_in_function > trigger:
error_level = int(math.log(self.lines_in_function / base_trigger, 2))
error_level = int(
math.log(self.lines_in_function / base_trigger, 2))
# 50 => 0, 100 => 1, 200 => 2, 400 => 3, 800 => 4, 1600 => 5, ...
if error_level > 5:
error_level = 5
......@@ -1058,7 +1062,7 @@ class FileInfo(object):
googlename = self.RepositoryName()
project, rest = os.path.split(googlename)
return (project,) + os.path.splitext(rest)
return (project, ) + os.path.splitext(rest)
def BaseName(self):
"""File base name - text after the final slash, before the final period."""
......@@ -1130,14 +1134,14 @@ def Error(filename, linenum, category, confidence, message):
if _ShouldPrintError(category, confidence, linenum):
_cpplint_state.IncrementErrorCount(category)
if _cpplint_state.output_format == 'vs7':
sys.stderr.write('%s(%s): %s [%s] [%d]\n' % (
filename, linenum, message, category, confidence))
sys.stderr.write('%s(%s): %s [%s] [%d]\n' %
(filename, linenum, message, category, confidence))
elif _cpplint_state.output_format == 'eclipse':
sys.stderr.write('%s:%s: warning: %s [%s] [%d]\n' % (
filename, linenum, message, category, confidence))
sys.stderr.write('%s:%s: warning: %s [%s] [%d]\n' %
(filename, linenum, message, category, confidence))
else:
sys.stderr.write('%s:%s: %s [%s] [%d]\n' % (
filename, linenum, message, category, confidence))
sys.stderr.write('%s:%s: %s [%s] [%d]\n' %
(filename, linenum, message, category, confidence))
# Matches standard C++ escape sequences per 2.13.2.3 of the C++ standard.
......@@ -1154,9 +1158,8 @@ _RE_PATTERN_C_COMMENTS = r'/\*(?:[^*]|\*(?!/))*\*/'
# if this doesn't work we try on left side but only if there's a non-character
# on the right.
_RE_PATTERN_CLEANSE_LINE_C_COMMENTS = re.compile(
r'(\s*' + _RE_PATTERN_C_COMMENTS + r'\s*$|' +
_RE_PATTERN_C_COMMENTS + r'\s+|' +
r'\s+' + _RE_PATTERN_C_COMMENTS + r'(?=\W)|' +
r'(\s*' + _RE_PATTERN_C_COMMENTS + r'\s*$|' + _RE_PATTERN_C_COMMENTS +
r'\s+|' + r'\s+' + _RE_PATTERN_C_COMMENTS + r'(?=\W)|' +
_RE_PATTERN_C_COMMENTS + r')')
......@@ -1208,7 +1211,8 @@ def CleanseRawStrings(raw_lines):
# line and resume copying the original lines, and also insert
# a "" on the last line.
leading_space = Match(r'^(\s*)\S', line)
line = leading_space.group(1) + '""' + line[end + len(delimiter):]
line = leading_space.group(1) + '""' + line[end + len(
delimiter):]
delimiter = None
else:
# Haven't found the end yet, append a blank line.
......@@ -1220,7 +1224,8 @@ def CleanseRawStrings(raw_lines):
while delimiter is None:
# Look for beginning of a raw string.
# See 2.14.15 [lex.string] for syntax.
matched = Match(r'^(.*)\b(?:R|u8R|uR|UR|LR)"([^\s\\()]*)\((.*)$', line)
matched = Match(r'^(.*)\b(?:R|u8R|uR|UR|LR)"([^\s\\()]*)\((.*)$',
line)
if matched:
delimiter = ')' + matched.group(2) + '"'
......@@ -1280,8 +1285,8 @@ def RemoveMultiLineComments(filename, lines, error):
return
lineix_end = FindNextMultiLineCommentEnd(lines, lineix_begin)
if lineix_end >= len(lines):
error(filename, lineix_begin + 1, 'readability/multiline_comment', 5,
'Could not find end of multi-line comment')
error(filename, lineix_begin + 1, 'readability/multiline_comment',
5, 'Could not find end of multi-line comment')
return
RemoveMultiLineCommentsFromRange(lines, lineix_begin, lineix_end + 1)
lineix = lineix_end + 1
......@@ -1321,9 +1326,10 @@ class CleansedLines(object):
self.num_lines = len(lines)
self.lines_without_raw_strings = CleanseRawStrings(lines)
for linenum in range(len(self.lines_without_raw_strings)):
self.lines.append(CleanseComments(
self.lines_without_raw_strings[linenum]))
elided = self._CollapseStrings(self.lines_without_raw_strings[linenum])
self.lines.append(
CleanseComments(self.lines_without_raw_strings[linenum]))
elided = self._CollapseStrings(self.lines_without_raw_strings[
linenum])
self.elided.append(CleanseComments(elided))
def NumLines(self):
......@@ -1382,7 +1388,8 @@ class CleansedLines(object):
# separator. So we are fine as long as we don't see something
# like "0.'3" (gcc 4.9.0 will not allow this literal).
if Search(r'\b(?:0[bBxX]?|[1-9])[0-9a-fA-F]*$', head):
match_literal = Match(r'^((?:\'?[0-9a-zA-Z_])*)(.*)$', "'" + tail)
match_literal = Match(r'^((?:\'?[0-9a-zA-Z_])*)(.*)$',
"'" + tail)
collapsed += head + match_literal.group(1).replace("'", '')
elided = match_literal.group(2)
else:
......@@ -1452,8 +1459,8 @@ def FindEndOfExpressionInLine(line, startpos, stack):
# Found potential end of template argument list.
# Ignore "->" and operator functions
if (i > 0 and
(line[i - 1] == '-' or Search(r'\boperator\s*$', line[0:i - 1]))):
if (i > 0 and (line[i - 1] == '-' or Search(r'\boperator\s*$',
line[0:i - 1]))):
continue
# Pop the stack if there is a matching '<'. Otherwise, ignore
......@@ -1547,8 +1554,7 @@ def FindStartOfExpressionInLine(line, endpos, stack):
#
# Ignore it if it's a "->" or ">=" or "operator>"
if (i > 0 and
(line[i - 1] == '-' or
Match(r'\s>=\s', line[i - 1:]) or
(line[i - 1] == '-' or Match(r'\s>=\s', line[i - 1:]) or
Search(r'\boperator\s*$', line[0:i]))):
i -= 1
else:
......@@ -1627,7 +1633,8 @@ def ReverseCloseExpression(clean_lines, linenum, pos):
while stack and linenum > 0:
linenum -= 1
line = clean_lines.elided[linenum]
(start_pos, stack) = FindStartOfExpressionInLine(line, len(line) - 1, stack)
(start_pos, stack) = FindStartOfExpressionInLine(line,
len(line) - 1, stack)
if start_pos > -1:
return (line, linenum, start_pos)
......@@ -1643,8 +1650,7 @@ def CheckForCopyright(filename, lines, error):
for line in xrange(1, min(len(lines), 11)):
if re.search(r'Copyright', lines[line], re.I): break
else: # means no copyright line was found
error(filename, 0, 'legal/copyright', 5,
'No copyright message found. '
error(filename, 0, 'legal/copyright', 5, 'No copyright message found. '
'You should have a line: "Copyright [year] <Copyright Owner>"')
......@@ -1741,8 +1747,8 @@ def CheckForHeaderGuard(filename, clean_lines, error):
if ifndef != cppvar + '_':
error_level = 5
ParseNolintSuppressions(filename, raw_lines[ifndef_linenum], ifndef_linenum,
error)
ParseNolintSuppressions(filename, raw_lines[ifndef_linenum],
ifndef_linenum, error)
error(filename, ifndef_linenum, 'build/header_guard', error_level,
'#ifndef header guard has wrong style, please use: %s' % cppvar)
......@@ -1763,7 +1769,8 @@ def CheckForHeaderGuard(filename, clean_lines, error):
no_single_line_comments = True
for i in xrange(1, len(raw_lines) - 1):
line = raw_lines[i]
if Match(r'^(?:(?:\'(?:\.|[^\'])*\')|(?:"(?:\.|[^"])*")|[^\'"])*//', line):
if Match(r'^(?:(?:\'(?:\.|[^\'])*\')|(?:"(?:\.|[^"])*")|[^\'"])*//',
line):
no_single_line_comments = False
break
......@@ -1825,10 +1832,13 @@ def CheckForBadCharacters(filename, lines, error):
"""
for linenum, line in enumerate(lines):
if u'\ufffd' in line:
error(filename, linenum, 'readability/utf8', 5,
'Line contains invalid UTF-8 (or Unicode replacement character).')
error(
filename, linenum, 'readability/utf8', 5,
'Line contains invalid UTF-8 (or Unicode replacement character).'
)
if '\0' in line:
error(filename, linenum, 'readability/nul', 5, 'Line contains NUL byte.')
error(filename, linenum, 'readability/nul', 5,
'Line contains NUL byte.')
def CheckForNewlineAtEOF(filename, lines, error):
......@@ -1845,7 +1855,8 @@ def CheckForNewlineAtEOF(filename, lines, error):
# To verify that the file ends in \n, we just have to make sure the
# last-but-two element of lines() exists and is empty.
if len(lines) < 3 or lines[-2]:
error(filename, len(lines) - 2, 'whitespace/ending_newline', 5,
error(filename,
len(lines) - 2, 'whitespace/ending_newline', 5,
'Could not find a newline character at the end of the file.')
......@@ -1911,10 +1922,8 @@ _THREADING_LIST = (
('gmtime(', 'gmtime_r(', _UNSAFE_FUNC_PREFIX + r'gmtime\([^)]+\)'),
('localtime(', 'localtime_r(', _UNSAFE_FUNC_PREFIX + r'localtime\([^)]+\)'),
('rand(', 'rand_r(', _UNSAFE_FUNC_PREFIX + r'rand\(\)'),
('strtok(', 'strtok_r(',
_UNSAFE_FUNC_PREFIX + r'strtok\([^)]+\)'),
('ttyname(', 'ttyname_r(', _UNSAFE_FUNC_PREFIX + r'ttyname\([^)]+\)'),
)
('strtok(', 'strtok_r(', _UNSAFE_FUNC_PREFIX + r'strtok\([^)]+\)'),
('ttyname(', 'ttyname_r(', _UNSAFE_FUNC_PREFIX + r'ttyname\([^)]+\)'), )
def CheckPosixThreading(filename, clean_lines, linenum, error):
......@@ -1938,9 +1947,8 @@ def CheckPosixThreading(filename, clean_lines, linenum, error):
# function we are looking for
if Search(pattern, line):
error(filename, linenum, 'runtime/threadsafe_fn', 2,
'Consider using ' + multithread_safe_func +
'...) instead of ' + single_thread_func +
'...) for improved thread safety.')
'Consider using ' + multithread_safe_func + '...) instead of '
+ single_thread_func + '...) for improved thread safety.')
def CheckVlogArguments(filename, clean_lines, linenum, error):
......@@ -1961,10 +1969,10 @@ def CheckVlogArguments(filename, clean_lines, linenum, error):
'VLOG() should be used with numeric verbosity level. '
'Use LOG() if you want symbolic severity levels.')
# Matches invalid increment: *count++, which moves pointer instead of
# incrementing a value.
_RE_PATTERN_INVALID_INCREMENT = re.compile(
r'^\s*\*\w+(\+\+|--);')
_RE_PATTERN_INVALID_INCREMENT = re.compile(r'^\s*\*\w+(\+\+|--);')
def CheckInvalidIncrement(filename, clean_lines, linenum, error):
......@@ -1985,7 +1993,8 @@ def CheckInvalidIncrement(filename, clean_lines, linenum, error):
"""
line = clean_lines.elided[linenum]
if _RE_PATTERN_INVALID_INCREMENT.match(line):
error(filename, linenum, 'runtime/invalid_increment', 5,
error(
filename, linenum, 'runtime/invalid_increment', 5,
'Changing pointer instead of value (or unused value of operator*).')
......@@ -2104,13 +2113,13 @@ class _ClassInfo(_BlockInfo):
seen_last_thing_in_class = False
for i in xrange(linenum - 1, self.starting_linenum, -1):
match = Search(
r'\b(DISALLOW_COPY_AND_ASSIGN|DISALLOW_IMPLICIT_CONSTRUCTORS)\(' +
self.name + r'\)',
clean_lines.elided[i])
r'\b(DISALLOW_COPY_AND_ASSIGN|DISALLOW_IMPLICIT_CONSTRUCTORS)\('
+ self.name + r'\)', clean_lines.elided[i])
if match:
if seen_last_thing_in_class:
error(filename, i, 'readability/constructors', 3,
match.group(1) + ' should be the last thing in the class')
match.group(1) +
' should be the last thing in the class')
break
if not Match(r'^\s*$', clean_lines.elided[i]):
......@@ -2126,7 +2135,8 @@ class _ClassInfo(_BlockInfo):
else:
parent = 'class ' + self.name
error(filename, linenum, 'whitespace/indent', 3,
'Closing brace should be aligned with beginning of %s' % parent)
'Closing brace should be aligned with beginning of %s' %
parent)
class _NamespaceInfo(_BlockInfo):
......@@ -2153,8 +2163,8 @@ class _NamespaceInfo(_BlockInfo):
# other than forward declarations). There is currently no logic on
# deciding what these nontrivial things are, so this check is
# triggered by namespace size only, which works most of the time.
if (linenum - self.starting_linenum < 10
and not Match(r'};*\s*(//|/\*).*\bnamespace\b', line)):
if (linenum - self.starting_linenum < 10 and
not Match(r'};*\s*(//|/\*).*\bnamespace\b', line)):
return
# Look for matching comment at end of namespace.
......@@ -2171,9 +2181,8 @@ class _NamespaceInfo(_BlockInfo):
# expected namespace.
if self.name:
# Named namespace
if not Match((r'};*\s*(//|/\*).*\bnamespace\s+' + re.escape(self.name) +
r'[\*/\.\\\s]*$'),
line):
if not Match((r'};*\s*(//|/\*).*\bnamespace\s+' +
re.escape(self.name) + r'[\*/\.\\\s]*$'), line):
error(filename, linenum, 'readability/namespace', 5,
'Namespace should be terminated with "// namespace %s"' %
self.name)
......@@ -2182,13 +2191,17 @@ class _NamespaceInfo(_BlockInfo):
if not Match(r'};*\s*(//|/\*).*\bnamespace[\*/\.\\\s]*$', line):
# If "// namespace anonymous" or "// anonymous namespace (more text)",
# mention "// anonymous namespace" as an acceptable form
if Match(r'}.*\b(namespace anonymous|anonymous namespace)\b', line):
error(filename, linenum, 'readability/namespace', 5,
if Match(r'}.*\b(namespace anonymous|anonymous namespace)\b',
line):
error(
filename, linenum, 'readability/namespace', 5,
'Anonymous namespace should be terminated with "// namespace"'
' or "// anonymous namespace"')
else:
error(filename, linenum, 'readability/namespace', 5,
'Anonymous namespace should be terminated with "// namespace"')
error(
filename, linenum, 'readability/namespace', 5,
'Anonymous namespace should be terminated with "// namespace"'
)
class _PreprocessorInfo(object):
......@@ -2316,7 +2329,8 @@ class NestingState(object):
# We can't be sure if we just find a single '<', and need to
# find the matching '>'.
(_, end_line, end_pos) = CloseExpression(clean_lines, linenum, pos - 1)
(_, end_line, end_pos) = CloseExpression(clean_lines, linenum,
pos - 1)
if end_pos < 0:
# Not sure if template argument list or syntax error in file
return False
......@@ -2357,7 +2371,8 @@ class NestingState(object):
# whole nesting stack up to this point. This is what we
# keep after the #endif.
self.pp_stack[-1].seen_else = True
self.pp_stack[-1].stack_before_else = copy.deepcopy(self.stack)
self.pp_stack[-1].stack_before_else = copy.deepcopy(
self.stack)
# Restore the stack to how it was before the #if
self.stack = copy.deepcopy(self.pp_stack[-1].stack_before_if)
......@@ -2414,8 +2429,7 @@ class NestingState(object):
# Also check if we are starting or ending an inline assembly block.
if inner_block.inline_asm in (_NO_ASM, _END_ASM):
if (depth_change != 0 and
inner_block.open_parentheses == 1 and
if (depth_change != 0 and inner_block.open_parentheses == 1 and
_MATCH_ASM.match(line)):
# Enter assembly block
inner_block.inline_asm = _INSIDE_ASM
......@@ -2436,11 +2450,13 @@ class NestingState(object):
# declarations even if it weren't followed by a whitespace, this
# is so that we don't confuse our namespace checker. The
# missing spaces will be flagged by CheckSpacing.
namespace_decl_match = Match(r'^\s*namespace\b\s*([:\w]+)?(.*)$', line)
namespace_decl_match = Match(r'^\s*namespace\b\s*([:\w]+)?(.*)$',
line)
if not namespace_decl_match:
break
new_namespace = _NamespaceInfo(namespace_decl_match.group(1), linenum)
new_namespace = _NamespaceInfo(
namespace_decl_match.group(1), linenum)
self.stack.append(new_namespace)
line = namespace_decl_match.group(2)
......@@ -2469,10 +2485,12 @@ class NestingState(object):
# an unmatched '>'. If we see one, assume we are inside a
# template argument list.
end_declaration = len(class_decl_match.group(1))
if not self.InTemplateArgumentList(clean_lines, linenum, end_declaration):
self.stack.append(_ClassInfo(
class_decl_match.group(3), class_decl_match.group(2),
clean_lines, linenum))
if not self.InTemplateArgumentList(clean_lines, linenum,
end_declaration):
self.stack.append(
_ClassInfo(
class_decl_match.group(3),
class_decl_match.group(2), clean_lines, linenum))
line = class_decl_match.group(4)
# If we have not yet seen the opening brace for the innermost block,
......@@ -2485,8 +2503,7 @@ class NestingState(object):
classinfo = self.stack[-1]
access_match = Match(
r'^(.*)\b(public|private|protected|signals)(\s+(?:slots\s*)?)?'
r':(?:[^:]|$)',
line)
r':(?:[^:]|$)', line)
if access_match:
classinfo.access = access_match.group(2)
......@@ -2541,7 +2558,8 @@ class NestingState(object):
else: # token == '}'
# Perform end of block checks and pop the stack.
if self.stack:
self.stack[-1].CheckEnd(filename, clean_lines, linenum, error)
self.stack[-1].CheckEnd(filename, clean_lines, linenum,
error)
self.stack.pop()
line = matched.group(2)
......@@ -2579,8 +2597,8 @@ class NestingState(object):
obj.name)
def CheckForNonStandardConstructs(filename, clean_lines, linenum,
nesting_state, error):
def CheckForNonStandardConstructs(filename, clean_lines, linenum, nesting_state,
error):
r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2.
Complain about several constructs which gcc-2 accepts, but which are
......@@ -2632,8 +2650,7 @@ def CheckForNonStandardConstructs(filename, clean_lines, linenum,
if Search(r'\b(const|volatile|void|char|short|int|long'
r'|float|double|signed|unsigned'
r'|schar|u?int8|u?int16|u?int32|u?int64)'
r'\s+(register|static|extern|typedef)\b',
line):
r'\s+(register|static|extern|typedef)\b', line):
error(filename, linenum, 'build/storage_class', 5,
'Storage class (static, extern, typedef, etc) should be first.')
......@@ -2642,12 +2659,14 @@ def CheckForNonStandardConstructs(filename, clean_lines, linenum,
'Uncommented text after #endif is non-standard. Use a comment.')
if Match(r'\s*class\s+(\w+\s*::\s*)+\w+\s*;', line):
error(filename, linenum, 'build/forward_decl', 5,
error(
filename, linenum, 'build/forward_decl', 5,
'Inner-style forward declarations are invalid. Remove this line.')
if Search(r'(\w+|[+-]?\d+(\.\d*)?)\s*(<|>)\?=?\s*(\w+|[+-]?\d+)(\.\d*)?',
line):
error(filename, linenum, 'build/deprecated', 3,
error(
filename, linenum, 'build/deprecated', 3,
'>? and <? (max and min) operators are non-standard and deprecated.')
if Search(r'^\s*const\s*string\s*&\s*\w+\s*;', line):
......@@ -2679,9 +2698,7 @@ def CheckForNonStandardConstructs(filename, clean_lines, linenum,
# strongly suggest something is wrong.
explicit_constructor_match = Match(
r'\s+(?:inline\s+)?(explicit\s+)?(?:inline\s+)?%s\s*'
r'\(((?:[^()]|\([^()]*\))*)\)'
% re.escape(base_classname),
line)
r'\(((?:[^()]|\([^()]*\))*)\)' % re.escape(base_classname), line)
if explicit_constructor_match:
is_marked_explicit = explicit_constructor_match.group(1)
......@@ -2704,38 +2721,40 @@ def CheckForNonStandardConstructs(filename, clean_lines, linenum,
i += 1
defaulted_args = [arg for arg in constructor_args if '=' in arg]
noarg_constructor = (not constructor_args or # empty arg list
noarg_constructor = (
not constructor_args or # empty arg list
# 'void' arg specifier
(len(constructor_args) == 1 and
constructor_args[0].strip() == 'void'))
onearg_constructor = ((len(constructor_args) == 1 and # exactly one arg
onearg_constructor = (
(
len(constructor_args) == 1 and # exactly one arg
not noarg_constructor) or
# all but at most one arg defaulted
(len(constructor_args) >= 1 and
not noarg_constructor and
(len(constructor_args) >= 1 and not noarg_constructor and
len(defaulted_args) >= len(constructor_args) - 1))
initializer_list_constructor = bool(
onearg_constructor and
Search(r'\bstd\s*::\s*initializer_list\b', constructor_args[0]))
copy_constructor = bool(
onearg_constructor and
Match(r'(const\s+)?%s(\s*<[^>]*>)?(\s+const)?\s*(?:<\w+>\s*)?&'
% re.escape(base_classname), constructor_args[0].strip()))
Match(r'(const\s+)?%s(\s*<[^>]*>)?(\s+const)?\s*(?:<\w+>\s*)?&' %
re.escape(base_classname), constructor_args[0].strip()))
if (not is_marked_explicit and
onearg_constructor and
not initializer_list_constructor and
not copy_constructor):
if (not is_marked_explicit and onearg_constructor and
not initializer_list_constructor and not copy_constructor):
if defaulted_args:
error(filename, linenum, 'runtime/explicit', 5,
'Constructors callable with one argument '
'should be marked explicit.')
else:
error(filename, linenum, 'runtime/explicit', 5,
error(
filename, linenum, 'runtime/explicit', 5,
'Single-parameter constructors should be marked explicit.')
elif is_marked_explicit and not onearg_constructor:
if noarg_constructor:
error(filename, linenum, 'runtime/explicit', 5,
error(
filename, linenum, 'runtime/explicit', 5,
'Zero-parameter constructors should not be marked explicit.')
else:
error(filename, linenum, 'runtime/explicit', 0,
......@@ -2759,10 +2778,8 @@ def CheckSpacingForFunctionCall(filename, clean_lines, linenum, error):
# first see if we should be looking inside such an expression for a
# function call, to which we can apply more strict standards.
fncall = line # if there's no control flow construct, look at whole line
for pattern in (r'\bif\s*\((.*)\)\s*{',
r'\bfor\s*\((.*)\)\s*{',
r'\bwhile\s*\((.*)\)\s*[{;]',
r'\bswitch\s*\((.*)\)\s*{'):
for pattern in (r'\bif\s*\((.*)\)\s*{', r'\bfor\s*\((.*)\)\s*{',
r'\bwhile\s*\((.*)\)\s*[{;]', r'\bswitch\s*\((.*)\)\s*{'):
match = Search(pattern, line)
if match:
fncall = match.group(1) # look inside the parens for function calls
......@@ -2782,7 +2799,8 @@ def CheckSpacingForFunctionCall(filename, clean_lines, linenum, error):
# Note that we assume the contents of [] to be short enough that
# they'll never need to wrap.
if ( # Ignore control structures.
not Search(r'\b(if|for|while|switch|return|new|delete|catch|sizeof)\b',
not Search(
r'\b(if|for|while|switch|return|new|delete|catch|sizeof)\b',
fncall) and
# Ignore pointers/references to functions.
not Search(r' \([^)]+\)\([^)]*(\)|,$)', fncall) and
......@@ -2844,12 +2862,12 @@ def CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line,
if ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item,
clean_lines.elided, line):
CheckItemIndentationInNamespace(filename, clean_lines.elided,
line, error)
CheckItemIndentationInNamespace(filename, clean_lines.elided, line,
error)
def CheckForFunctionLengths(filename, clean_lines, linenum,
function_state, error):
def CheckForFunctionLengths(filename, clean_lines, linenum, function_state,
error):
"""Reports for long function bodies.
For an overview why this is done, see:
......@@ -2891,7 +2909,8 @@ def CheckForFunctionLengths(filename, clean_lines, linenum,
for start_linenum in xrange(linenum, clean_lines.NumLines()):
start_line = lines[start_linenum]
joined_line += ' ' + start_line.lstrip()
if Search(r'(;|})', start_line): # Declarations and trivial functions
if Search(r'(;|})',
start_line): # Declarations and trivial functions
body_found = True
break # ... ignore
elif Search(r'{', start_line):
......@@ -2933,14 +2952,14 @@ def CheckComment(line, filename, linenum, next_line_start, error):
if commentpos != -1:
# Check if the // may be in quotes. If so, ignore it
# Comparisons made explicit for clarity -- pylint: disable=g-explicit-bool-comparison
if (line.count('"', 0, commentpos) -
line.count('\\"', 0, commentpos)) % 2 == 0: # not in quotes
if (line.count('"', 0, commentpos) - line.count('\\"', 0, commentpos)
) % 2 == 0: # not in quotes
# Allow one space for new scopes, two spaces otherwise:
if (not (Match(r'^.*{ *//', line) and next_line_start == commentpos) and
((commentpos >= 1 and
line[commentpos-1] not in string.whitespace) or
if (not (Match(r'^.*{ *//', line) and next_line_start == commentpos)
and ((commentpos >= 1 and
line[commentpos - 1] not in string.whitespace) or
(commentpos >= 2 and
line[commentpos-2] not in string.whitespace))):
line[commentpos - 2] not in string.whitespace))):
error(filename, linenum, 'whitespace/comments', 2,
'At least two spaces is best between code and comments')
......@@ -3044,8 +3063,7 @@ def CheckSpacing(filename, clean_lines, linenum, nesting_state, error):
#
# Also skip blank line checks for 'extern "C"' blocks, which are formatted
# like namespaces.
if (IsBlankLine(line) and
not nesting_state.InNamespaceBody() and
if (IsBlankLine(line) and not nesting_state.InNamespaceBody() and
not nesting_state.InExternC()):
elided = clean_lines.elided
prev_line = elided[linenum - 1]
......@@ -3066,12 +3084,12 @@ def CheckSpacing(filename, clean_lines, linenum, nesting_state, error):
if Match(r' {6}\w', prev_line): # Initializer list?
# We are looking for the opening column of initializer list, which
# should be indented 4 spaces to cause 6 space indentation afterwards.
search_position = linenum-2
while (search_position >= 0
and Match(r' {6}\w', elided[search_position])):
search_position = linenum - 2
while (search_position >= 0 and
Match(r' {6}\w', elided[search_position])):
search_position -= 1
exception = (search_position >= 0
and elided[search_position][:5] == ' :')
exception = (search_position >= 0 and
elided[search_position][:5] == ' :')
else:
# Search for the function arguments or an initializer list. We use a
# simple heuristic here: If the line is indented 4 spaces; and we have a
......@@ -3080,8 +3098,7 @@ def CheckSpacing(filename, clean_lines, linenum, nesting_state, error):
# a function header. If we have a colon indented 4 spaces, it is an
# initializer list.
exception = (Match(r' {4}\w[^\(]*\)\s*(const\s*)?(\{\s*$|:)',
prev_line)
or Match(r' {4}:', prev_line))
prev_line) or Match(r' {4}:', prev_line))
if not exception:
error(filename, linenum, 'whitespace/blank_line', 2,
......@@ -3097,9 +3114,8 @@ def CheckSpacing(filename, clean_lines, linenum, nesting_state, error):
# }
if linenum + 1 < clean_lines.NumLines():
next_line = raw[linenum + 1]
if (next_line
and Match(r'\s*}', next_line)
and next_line.find('} else ') == -1):
if (next_line and Match(r'\s*}', next_line) and
next_line.find('} else ') == -1):
error(filename, linenum, 'whitespace/blank_line', 3,
'Redundant blank line at the end of a code block '
'should be deleted.')
......@@ -3122,8 +3138,7 @@ def CheckSpacing(filename, clean_lines, linenum, nesting_state, error):
# You shouldn't have spaces before your brackets, except maybe after
# 'delete []' or 'return []() {};'
if Search(r'\w\s+\[', line) and not Search(r'(?:delete|return)\s+\[', line):
error(filename, linenum, 'whitespace/braces', 5,
'Extra space before [')
error(filename, linenum, 'whitespace/braces', 5, 'Extra space before [')
# In range-based for, we wanted spaces before and after the colon, but
# not around "::" tokens that might appear.
......@@ -3162,11 +3177,11 @@ def CheckOperatorSpacing(filename, clean_lines, linenum, error):
# sometimes people put non-spaces on one side when aligning ='s among
# many lines (not that this is behavior that I approve of...)
if ((Search(r'[\w.]=', line) or
Search(r'=[\w.]', line))
and not Search(r'\b(if|while|for) ', line)
Search(r'=[\w.]', line)) and not Search(r'\b(if|while|for) ', line)
# Operators taken from [lex.operators] in C++11 standard.
and not Search(r'(>=|<=|==|!=|&=|\^=|\|=|\+=|\*=|\/=|\%=)', line)
and not Search(r'operator=', line)):
and
not Search(r'(>=|<=|==|!=|&=|\^=|\|=|\+=|\*=|\/=|\%=)', line) and
not Search(r'operator=', line)):
error(filename, linenum, 'whitespace/operators', 4,
'Missing spaces around =')
......@@ -3196,8 +3211,8 @@ def CheckOperatorSpacing(filename, clean_lines, linenum, error):
# space. This is done to avoid some false positives with shifts.
match = Match(r'^(.*[^\s<])<[^\s=<,]', line)
if match:
(_, _, end_pos) = CloseExpression(
clean_lines, linenum, len(match.group(1)))
(_, _, end_pos) = CloseExpression(clean_lines, linenum,
len(match.group(1)))
if end_pos <= -1:
error(filename, linenum, 'whitespace/operators', 3,
'Missing spaces around <')
......@@ -3207,8 +3222,8 @@ def CheckOperatorSpacing(filename, clean_lines, linenum, error):
# false positives with shifts.
match = Match(r'^(.*[^-\s>])>[^\s=>,]', line)
if match:
(_, _, start_pos) = ReverseCloseExpression(
clean_lines, linenum, len(match.group(1)))
(_, _, start_pos) = ReverseCloseExpression(clean_lines, linenum,
len(match.group(1)))
if start_pos <= -1:
error(filename, linenum, 'whitespace/operators', 3,
'Missing spaces around >')
......@@ -3218,8 +3233,10 @@ def CheckOperatorSpacing(filename, clean_lines, linenum, error):
#
# We also allow operators following an opening parenthesis, since
# those tend to be macros that deal with operators.
match = Search(r'(operator|[^\s(<])(?:L|UL|ULL|l|ul|ull)?<<([^\s,=<])', line)
if (match and not (match.group(1).isdigit() and match.group(2).isdigit()) and
match = Search(r'(operator|[^\s(<])(?:L|UL|ULL|l|ul|ull)?<<([^\s,=<])',
line)
if (match and
not (match.group(1).isdigit() and match.group(2).isdigit()) and
not (match.group(1) == 'operator' and match.group(2) == ';')):
error(filename, linenum, 'whitespace/operators', 3,
'Missing spaces around <<')
......@@ -3271,8 +3288,7 @@ def CheckParenthesisSpacing(filename, clean_lines, linenum, error):
# We don't want: "if ( foo)" or "if ( foo )".
# Exception: "for ( ; foo; bar)" and "for (foo; bar; )" are allowed.
match = Search(r'\b(if|for|while|switch)\s*'
r'\(([ ]*)(.).*[^ ]+([ ]*)\)\s*{\s*$',
line)
r'\(([ ]*)(.).*[^ ]+([ ]*)\)\s*{\s*$', line)
if match:
if len(match.group(2)) != len(match.group(4)):
if not (match.group(3) == ';' and
......@@ -3310,8 +3326,7 @@ def CheckCommaSpacing(filename, clean_lines, linenum, error):
# elided comments.
if (Search(r',[^,\s]', ReplaceAll(r'\boperator\s*,\s*\(', 'F(', line)) and
Search(r',[^,\s]', raw[linenum])):
error(filename, linenum, 'whitespace/comma', 3,
'Missing space after ,')
error(filename, linenum, 'whitespace/comma', 3, 'Missing space after ,')
# You should always have a space after a semicolon
# except for few corner cases
......@@ -3367,8 +3382,8 @@ def CheckBracesSpacing(filename, clean_lines, linenum, error):
# There is a false negative with this approach if people inserted
# spurious semicolons, e.g. "if (cond){};", but we will catch the
# spurious semicolon with a separate check.
(endline, endlinenum, endpos) = CloseExpression(
clean_lines, linenum, len(match.group(1)))
(endline, endlinenum, endpos) = CloseExpression(clean_lines, linenum,
len(match.group(1)))
trailing_text = ''
if endpos > -1:
trailing_text = endline[endpos:]
......@@ -3391,11 +3406,11 @@ def CheckBracesSpacing(filename, clean_lines, linenum, error):
error(filename, linenum, 'whitespace/semicolon', 5,
'Semicolon defining empty statement. Use {} instead.')
elif Search(r'^\s*;\s*$', line):
error(filename, linenum, 'whitespace/semicolon', 5,
error(
filename, linenum, 'whitespace/semicolon', 5,
'Line contains only semicolon. If this should be an empty statement, '
'use {} instead.')
elif (Search(r'\s+;\s*$', line) and
not Search(r'\bfor\b', line)):
elif (Search(r'\s+;\s*$', line) and not Search(r'\bfor\b', line)):
error(filename, linenum, 'whitespace/semicolon', 5,
'Extra space before last semicolon. If this should be an empty '
'statement, use {} instead.')
......@@ -3429,10 +3444,10 @@ def IsTemplateParameterList(clean_lines, linenum, column):
Returns:
True if this token is end of a template parameter list, False otherwise.
"""
(_, startline, startpos) = ReverseCloseExpression(
clean_lines, linenum, column)
if (startpos > -1 and
Search(r'\btemplate\s*$', clean_lines.elided[startline][0:startpos])):
(_, startline, startpos) = ReverseCloseExpression(clean_lines, linenum,
column)
if (startpos > -1 and Search(r'\btemplate\s*$',
clean_lines.elided[startline][0:startpos])):
return True
return False
......@@ -3476,18 +3491,19 @@ def IsRValueType(typenames, clean_lines, nesting_state, linenum, column):
# recognize pointer and reference types:
# int* Function()
# int& Function()
if (match.group(2) in typenames or
match.group(2) in ['char', 'char16_t', 'char32_t', 'wchar_t', 'bool',
'short', 'int', 'long', 'signed', 'unsigned',
'float', 'double', 'void', 'auto', '>', '*', '&']):
if (match.group(2) in typenames or match.group(2) in [
'char', 'char16_t', 'char32_t', 'wchar_t', 'bool', 'short', 'int',
'long', 'signed', 'unsigned', 'float', 'double', 'void', 'auto',
'>', '*', '&'
]):
return True
# If we see a close parenthesis, look for decltype on the other side.
# decltype would unambiguously identify a type, anything else is
# probably a parenthesized expression and not a type.
if match.group(2) == ')':
return IsDecltype(
clean_lines, linenum, len(match.group(1)) + len(match.group(2)) - 1)
return IsDecltype(clean_lines, linenum,
len(match.group(1)) + len(match.group(2)) - 1)
# Check for casts and cv-qualifiers.
# match.group(1) remainder
......@@ -3496,8 +3512,7 @@ def IsRValueType(typenames, clean_lines, nesting_state, linenum, column):
# const type&&
# type const&&
if Search(r'\b(?:const_cast\s*<|static_cast\s*<|dynamic_cast\s*<|'
r'reinterpret_cast\s*<|\w+\s)\s*$',
match.group(1)):
r'reinterpret_cast\s*<|\w+\s)\s*$', match.group(1)):
return True
# Look for a preceding symbol that might help differentiate the context.
......@@ -3594,7 +3609,8 @@ def IsRValueType(typenames, clean_lines, nesting_state, linenum, column):
# sizeof(type&&
# operator=(type&&
#
if Search(r'(?:\]|\bfor|\bsizeof|\boperator\s*\S+\s*)\s*$', before_text):
if Search(r'(?:\]|\bfor|\bsizeof|\boperator\s*\S+\s*)\s*$',
before_text):
return True
# Patterns that are likely to be expressions:
......@@ -3618,9 +3634,10 @@ def IsRValueType(typenames, clean_lines, nesting_state, linenum, column):
# Check for constructors, which don't have return types.
if Search(r'\b(?:explicit|inline)$', match_func.group(1)):
return True
implicit_constructor = Match(r'\s*(\w+)\((?:const\s+)?(\w+)', prefix)
if (implicit_constructor and
implicit_constructor.group(1) == implicit_constructor.group(2)):
implicit_constructor = Match(r'\s*(\w+)\((?:const\s+)?(\w+)',
prefix)
if (implicit_constructor and implicit_constructor.group(1) ==
implicit_constructor.group(2)):
return True
return IsRValueType(typenames, clean_lines, nesting_state, linenum,
len(match_func.group(1)))
......@@ -3633,8 +3650,8 @@ def IsRValueType(typenames, clean_lines, nesting_state, linenum, column):
if match_symbol.group(2) == '>':
# Possibly a closing bracket, check that what's on the other side
# looks like the start of a template.
return IsTemplateParameterList(
clean_lines, start, len(match_symbol.group(1)))
return IsTemplateParameterList(clean_lines, start,
len(match_symbol.group(1)))
# Some other symbol, usually something like "a=b&&c". This is most
# likely not a type.
......@@ -3653,8 +3670,8 @@ def IsDeletedOrDefault(clean_lines, linenum):
open_paren = clean_lines.elided[linenum].find('(')
if open_paren < 0:
return False
(close_line, _, close_paren) = CloseExpression(
clean_lines, linenum, open_paren)
(close_line, _, close_paren) = CloseExpression(clean_lines, linenum,
open_paren)
if close_paren < 0:
return False
return Match(r'\s*=\s*(?:delete|default)\b', close_line[close_paren:])
......@@ -3697,7 +3714,8 @@ def IsRValueAllowed(clean_lines, linenum, typenames):
previous_line = 'ReturnType'
if linenum > 0:
previous_line = clean_lines.elided[linenum - 1]
if Match(r'^\s*$', previous_line) or Search(r'[{}:;]\s*$', previous_line):
if Match(r'^\s*$', previous_line) or Search(r'[{}:;]\s*$',
previous_line):
return IsDeletedOrDefault(clean_lines, linenum)
# Reject types not mentioned in template-argument-list
......@@ -3747,7 +3765,8 @@ def GetTemplateArgs(clean_lines, linenum):
if match:
# template-argument-list on the same line as function name
start_col = len(match.group(1))
_, end_line, end_col = CloseExpression(clean_lines, func_line, start_col)
_, end_line, end_col = CloseExpression(clean_lines, func_line,
start_col)
if end_col > -1 and end_line == func_line:
start_col += 1 # Skip the opening bracket
argument_list = clean_lines.elided[func_line][start_col:end_col]
......@@ -3765,7 +3784,8 @@ def GetTemplateArgs(clean_lines, linenum):
argument_list += clean_lines.elided[start_line][start_col:]
start_col = 0
start_line += 1
argument_list += clean_lines.elided[func_line - 1][start_col:end_col]
argument_list += clean_lines.elided[func_line - 1][start_col:
end_col]
if not argument_list:
return set()
......@@ -3801,7 +3821,8 @@ def CheckRValueReference(filename, clean_lines, linenum, nesting_state, error):
match = Match(r'^(.*\S)&&', line)
if not match:
match = Match(r'(.*)&&\S', line)
if (not match) or '(&&)' in line or Search(r'\boperator\s*$', match.group(1)):
if (not match) or '(&&)' in line or Search(r'\boperator\s*$',
match.group(1)):
return
# Either poorly formed && or an rvalue reference, check the context
......@@ -3845,7 +3866,8 @@ def CheckSectionSpacing(filename, clean_lines, class_info, linenum, error):
linenum <= class_info.starting_linenum):
return
matched = Match(r'\s*(public|protected|private):', clean_lines.lines[linenum])
matched = Match(r'\s*(public|protected|private):',
clean_lines.lines[linenum])
if matched:
# Issue warning if the line before public/protected/private was
# not a blank line, but don't do this if the previous line contains
......@@ -3870,7 +3892,8 @@ def CheckSectionSpacing(filename, clean_lines, class_info, linenum, error):
break
if end_class_head < linenum - 1:
error(filename, linenum, 'whitespace/blank_line', 3,
'"%s:" should be preceded by a blank line' % matched.group(1))
'"%s:" should be preceded by a blank line' %
matched.group(1))
def GetPreviousNonBlankLine(clean_lines, linenum):
......@@ -3940,8 +3963,10 @@ def CheckBraces(filename, clean_lines, linenum, error):
(endline, _, endpos) = CloseExpression(clean_lines, linenum, pos)
brace_on_right = endline[endpos:].find('{') != -1
if brace_on_left != brace_on_right: # must be brace after if
error(filename, linenum, 'readability/braces', 5,
'If an else has a brace on one side, it should have it on both')
error(
filename, linenum, 'readability/braces', 5,
'If an else has a brace on one side, it should have it on both'
)
elif Search(r'}\s*else[^{]*$', line) or Match(r'[^}]*else\s*{', line):
error(filename, linenum, 'readability/braces', 5,
'If an else has a brace on one side, it should have it on both')
......@@ -3971,15 +3996,16 @@ def CheckBraces(filename, clean_lines, linenum, error):
if if_match:
# This could be a multiline if condition, so find the end first.
pos = if_match.end() - 1
(endline, endlinenum, endpos) = CloseExpression(clean_lines, linenum, pos)
(endline, endlinenum, endpos) = CloseExpression(clean_lines,
linenum, pos)
# Check for an opening brace, either directly after the if or on the next
# line. If found, this isn't a single-statement conditional.
if (not Match(r'\s*{', endline[endpos:])
and not (Match(r'\s*$', endline[endpos:])
and endlinenum < (len(clean_lines.elided) - 1)
and Match(r'\s*{', clean_lines.elided[endlinenum + 1]))):
while (endlinenum < len(clean_lines.elided)
and ';' not in clean_lines.elided[endlinenum][endpos:]):
if (not Match(r'\s*{', endline[endpos:]) and
not (Match(r'\s*$', endline[endpos:]) and endlinenum <
(len(clean_lines.elided) - 1) and
Match(r'\s*{', clean_lines.elided[endlinenum + 1]))):
while (endlinenum < len(clean_lines.elided) and
';' not in clean_lines.elided[endlinenum][endpos:]):
endlinenum += 1
endpos = 0
if endlinenum < len(clean_lines.elided):
......@@ -3991,10 +4017,13 @@ def CheckBraces(filename, clean_lines, linenum, error):
# Semicolon isn't the last character, there's something trailing.
# Output a warning if the semicolon is not contained inside
# a lambda expression.
if not Match(r'^[^{};]*\[[^\[\]]*\][^{}]*\{[^{}]*\}\s*\)*[;,]\s*$',
if not Match(
r'^[^{};]*\[[^\[\]]*\][^{}]*\{[^{}]*\}\s*\)*[;,]\s*$',
endline):
error(filename, linenum, 'readability/braces', 4,
'If/else bodies with multiple statements require braces')
error(
filename, linenum, 'readability/braces', 4,
'If/else bodies with multiple statements require braces'
)
elif endlinenum < len(clean_lines.elided) - 1:
# Make sure the next line is dedented
next_line = clean_lines.elided[endlinenum + 1]
......@@ -4002,14 +4031,17 @@ def CheckBraces(filename, clean_lines, linenum, error):
# With ambiguous nested if statements, this will error out on the
# if that *doesn't* match the else, regardless of whether it's the
# inner one or outer one.
if (if_match and Match(r'\s*else\b', next_line)
and next_indent != if_indent):
error(filename, linenum, 'readability/braces', 4,
if (if_match and Match(r'\s*else\b', next_line) and
next_indent != if_indent):
error(
filename, linenum, 'readability/braces', 4,
'Else clause should be indented at the same level as if. '
'Ambiguous nested if/else chains require braces.')
elif next_indent > if_indent:
error(filename, linenum, 'readability/braces', 4,
'If/else bodies with multiple statements require braces')
error(
filename, linenum, 'readability/braces', 4,
'If/else bodies with multiple statements require braces'
)
def CheckTrailingSemicolon(filename, clean_lines, linenum, error):
......@@ -4096,24 +4128,22 @@ def CheckTrailingSemicolon(filename, clean_lines, linenum, error):
# - Lambdas
# - alignas specifier with anonymous structs:
closing_brace_pos = match.group(1).rfind(')')
opening_parenthesis = ReverseCloseExpression(
clean_lines, linenum, closing_brace_pos)
opening_parenthesis = ReverseCloseExpression(clean_lines, linenum,
closing_brace_pos)
if opening_parenthesis[2] > -1:
line_prefix = opening_parenthesis[0][0:opening_parenthesis[2]]
macro = Search(r'\b([A-Z_]+)\s*$', line_prefix)
func = Match(r'^(.*\])\s*$', line_prefix)
if ((macro and
macro.group(1) not in (
'TEST', 'TEST_F', 'MATCHER', 'MATCHER_P', 'TYPED_TEST',
if ((macro and macro.group(1) not in
('TEST', 'TEST_F', 'MATCHER', 'MATCHER_P', 'TYPED_TEST',
'EXCLUSIVE_LOCKS_REQUIRED', 'SHARED_LOCKS_REQUIRED',
'LOCKS_EXCLUDED', 'INTERFACE_DEF')) or
(func and not Search(r'\boperator\s*\[\s*\]', func.group(1))) or
Search(r'\b(?:struct|union)\s+alignas\s*$', line_prefix) or
Search(r'\s+=\s*$', line_prefix)):
match = None
if (match and
opening_parenthesis[1] > 1 and
Search(r'\]\s*$', clean_lines.elided[opening_parenthesis[1] - 1])):
if (match and opening_parenthesis[1] > 1 and Search(
r'\]\s*$', clean_lines.elided[opening_parenthesis[1] - 1])):
# Multi-line lambda-expression
match = None
......@@ -4135,8 +4165,8 @@ def CheckTrailingSemicolon(filename, clean_lines, linenum, error):
# Check matching closing brace
if match:
(endline, endlinenum, endpos) = CloseExpression(
clean_lines, linenum, len(match.group(1)))
(endline, endlinenum, endpos) = CloseExpression(clean_lines, linenum,
len(match.group(1)))
if endpos > -1 and Match(r'^\s*;', endline[endpos:]):
# Current {} pair is eligible for semicolon check, and we have found
# the redundant semicolon, output warning here.
......@@ -4169,15 +4199,16 @@ def CheckEmptyBlockBody(filename, clean_lines, linenum, error):
matched = Match(r'\s*(for|while|if)\s*\(', line)
if matched:
# Find the end of the conditional expression
(end_line, end_linenum, end_pos) = CloseExpression(
clean_lines, linenum, line.find('('))
(end_line, end_linenum, end_pos) = CloseExpression(clean_lines, linenum,
line.find('('))
# Output warning if what follows the condition expression is a semicolon.
# No warning for all other cases, including whitespace or newline, since we
# have a separate check for semicolons preceded by whitespace.
if end_pos >= 0 and Match(r';', end_line[end_pos:]):
if matched.group(1) == 'if':
error(filename, end_linenum, 'whitespace/empty_conditional_body', 5,
error(filename, end_linenum,
'whitespace/empty_conditional_body', 5,
'Empty conditional bodies should use {}')
else:
error(filename, end_linenum, 'whitespace/empty_loop_body', 5,
......@@ -4224,8 +4255,8 @@ def CheckCheck(filename, clean_lines, linenum, error):
return
# Find end of the boolean expression by matching parentheses
(last_line, end_line, end_pos) = CloseExpression(
clean_lines, linenum, start_pos)
(last_line, end_line, end_pos) = CloseExpression(clean_lines, linenum,
start_pos)
if end_pos < 0:
return
......@@ -4320,9 +4351,9 @@ def CheckCheck(filename, clean_lines, linenum, error):
# We are still keeping the less descriptive message because if lhs
# or rhs gets long, the error message might become unreadable.
error(filename, linenum, 'readability/check', 2,
'Consider using %s instead of %s(a %s b)' % (
_CHECK_REPLACEMENT[check_macro][operator],
check_macro, operator))
'Consider using %s instead of %s(a %s b)' %
(_CHECK_REPLACEMENT[check_macro][operator], check_macro,
operator))
def CheckAltTokens(filename, clean_lines, linenum, error):
......@@ -4526,8 +4557,8 @@ def _DropCommonSuffixes(filename):
Returns:
The filename with the common suffix removed.
"""
for suffix in ('test.cc', 'regtest.cc', 'unittest.cc',
'inl.h', 'impl.h', 'internal.h'):
for suffix in ('test.cc', 'regtest.cc', 'unittest.cc', 'inl.h', 'impl.h',
'internal.h'):
if (filename.endswith(suffix) and len(filename) > len(suffix) and
filename[-len(suffix) - 1] in ('-', '_')):
return filename[:-len(suffix) - 1]
......@@ -4543,8 +4574,7 @@ def _IsTestFilename(filename):
Returns:
True if 'filename' looks like a test, False otherwise.
"""
if (filename.endswith('_test.cc') or
filename.endswith('_unittest.cc') or
if (filename.endswith('_test.cc') or filename.endswith('_unittest.cc') or
filename.endswith('_regtest.cc')):
return True
else:
......@@ -4610,7 +4640,6 @@ def _ClassifyInclude(fileinfo, include, is_system):
return _OTHER_HEADER
def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
"""Check rules that are applicable to #include lines.
......@@ -4653,7 +4682,8 @@ def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
'"%s" already included at %s:%s' %
(include, filename, duplicate_line))
elif (include.endswith('.cc') and
os.path.dirname(fileinfo.RepositoryName()) != os.path.dirname(include)):
os.path.dirname(fileinfo.RepositoryName()) !=
os.path.dirname(include)):
error(filename, linenum, 'build/include', 4,
'Do not include .cc files from other packages')
elif not _THIRD_PARTY_HEADERS_PATTERN.match(include):
......@@ -4676,15 +4706,15 @@ def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
error(filename, linenum, 'build/include_order', 4,
'%s. Should be: %s.h, c system, c++ system, other.' %
(error_message, fileinfo.BaseName()))
canonical_include = include_state.CanonicalizeAlphabeticalOrder(include)
if not include_state.IsInAlphabeticalOrder(
clean_lines, linenum, canonical_include):
canonical_include = include_state.CanonicalizeAlphabeticalOrder(
include)
if not include_state.IsInAlphabeticalOrder(clean_lines, linenum,
canonical_include):
error(filename, linenum, 'build/include_alpha', 4,
'Include "%s" not in alphabetical order' % include)
include_state.SetLastHeader(canonical_include)
def _GetTextInside(text, start_pattern):
r"""Retrieves all the text between matching open and close parentheses.
......@@ -4763,12 +4793,12 @@ _RE_PATTERN_REF_PARAM = re.compile(
# A call-by-const-reference parameter either ends with 'const& identifier'
# or looks like 'const type& identifier' when 'type' is atomic.
_RE_PATTERN_CONST_REF_PARAM = (
r'(?:.*\s*\bconst\s*&\s*' + _RE_PATTERN_IDENT +
r'|const\s+' + _RE_PATTERN_TYPE + r'\s*&\s*' + _RE_PATTERN_IDENT + r')')
r'(?:.*\s*\bconst\s*&\s*' + _RE_PATTERN_IDENT + r'|const\s+' +
_RE_PATTERN_TYPE + r'\s*&\s*' + _RE_PATTERN_IDENT + r')')
def CheckLanguage(filename, clean_lines, linenum, file_extension,
include_state, nesting_state, error):
def CheckLanguage(filename, clean_lines, linenum, file_extension, include_state,
nesting_state, error):
"""Checks rules from the 'C++ language rules' section of cppguide.html.
Some of these rules are hard to test (function overloading, using
......@@ -4827,7 +4857,8 @@ def CheckLanguage(filename, clean_lines, linenum, file_extension,
match = Search(r'\b(short|long(?! +double)|long long)\b', line)
if match:
error(filename, linenum, 'runtime/int', 4,
'Use int16/int64/etc, rather than the C type %s' % match.group(1))
'Use int16/int64/etc, rather than the C type %s' %
match.group(1))
# Check if some verboten operator overloading is going on
# TODO(unknown): catch out-of-line unary operator&:
......@@ -4856,18 +4887,18 @@ def CheckLanguage(filename, clean_lines, linenum, file_extension,
if printf_args:
match = Match(r'([\w.\->()]+)$', printf_args)
if match and match.group(1) != '__VA_ARGS__':
function_name = re.search(r'\b((?:string)?printf)\s*\(',
line, re.I).group(1)
function_name = re.search(r'\b((?:string)?printf)\s*\(', line,
re.I).group(1)
error(filename, linenum, 'runtime/printf', 4,
'Potential format string bug. Do %s("%%s", %s) instead.'
% (function_name, match.group(1)))
'Potential format string bug. Do %s("%%s", %s) instead.' %
(function_name, match.group(1)))
# Check for potential memset bugs like memset(buf, sizeof(buf), 0).
match = Search(r'memset\s*\(([^,]*),\s*([^,]*),\s*0\s*\)', line)
if match and not Match(r"^''|-?[0-9]+|0x[0-9A-Fa-f]$", match.group(2)):
error(filename, linenum, 'runtime/memset', 4,
'Did you mean "memset(%s, 0, %s)"?'
% (match.group(1), match.group(2)))
'Did you mean "memset(%s, 0, %s)"?' %
(match.group(1), match.group(2)))
if Search(r'\busing namespace\b', line):
error(filename, linenum, 'build/namespaces', 5,
......@@ -4909,17 +4940,19 @@ def CheckLanguage(filename, clean_lines, linenum, file_extension,
is_const = False
break
if not is_const:
error(filename, linenum, 'runtime/arrays', 1,
error(
filename, linenum, 'runtime/arrays', 1,
'Do not use variable-length arrays. Use an appropriately named '
"('k' followed by CamelCase) compile-time constant for the size.")
"('k' followed by CamelCase) compile-time constant for the size."
)
# Check for use of unnamed namespaces in header files. Registration
# macros are typically OK, so we allow use of "namespace {" on lines
# that end with backslashes.
if (file_extension == 'h'
and Search(r'\bnamespace\s*{', line)
and line[-1] != '\\'):
error(filename, linenum, 'build/namespaces', 4,
if (file_extension == 'h' and Search(r'\bnamespace\s*{', line) and
line[-1] != '\\'):
error(
filename, linenum, 'build/namespaces', 4,
'Do not use unnamed namespaces in header files. See '
'http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces'
' for more information.')
......@@ -4943,8 +4976,7 @@ def CheckGlobalStatic(filename, clean_lines, linenum, error):
# Check for people declaring static/global STL strings at the top level.
# This is dangerous because the C++ language does not guarantee that
# globals with constructors are initialized before the first access.
match = Match(
r'((?:|static +)(?:|const +))string +([a-zA-Z0-9_:]+)\b(.*)',
match = Match(r'((?:|static +)(?:|const +))string +([a-zA-Z0-9_:]+)\b(.*)',
line)
# Remove false positives:
......@@ -4965,12 +4997,12 @@ def CheckGlobalStatic(filename, clean_lines, linenum, error):
# string Class::operator*()
if (match and
not Search(r'\bstring\b(\s+const)?\s*\*\s*(const\s+)?\w', line) and
not Search(r'\boperator\W', line) and
not Match(r'\s*(<.*>)?(::[a-zA-Z0-9_]+)*\s*\(([^"]|$)', match.group(3))):
error(filename, linenum, 'runtime/string', 4,
not Search(r'\boperator\W', line) and not Match(
r'\s*(<.*>)?(::[a-zA-Z0-9_]+)*\s*\(([^"]|$)', match.group(3))):
error(
filename, linenum, 'runtime/string', 4,
'For a static/global string constant, use a C style string instead: '
'"%schar %s[]".' %
(match.group(1), match.group(2)))
'"%schar %s[]".' % (match.group(1), match.group(2)))
if Search(r'\b([A-Za-z0-9_]*_)\(\1\)', line):
error(filename, linenum, 'runtime/init', 4,
......@@ -5021,8 +5053,8 @@ def IsDerivedFunction(clean_lines, linenum):
match = Match(r'^([^()]*\w+)\(', clean_lines.elided[i])
if match:
# Look for "override" after the matching closing parenthesis
line, _, closing_paren = CloseExpression(
clean_lines, i, len(match.group(1)))
line, _, closing_paren = CloseExpression(clean_lines, i,
len(match.group(1)))
return (closing_paren >= 0 and
Search(r'\boverride\b', line[closing_paren:]))
return False
......@@ -5040,7 +5072,8 @@ def IsOutOfLineMethodDefinition(clean_lines, linenum):
# Scan back a few lines for start of current function
for i in xrange(linenum, max(-1, linenum - 10), -1):
if Match(r'^([^()]*\w+)\(', clean_lines.elided[i]):
return Match(r'^[^()]*\w+::\w+\(', clean_lines.elided[i]) is not None
return Match(r'^[^()]*\w+::\w+\(',
clean_lines.elided[i]) is not None
return False
......@@ -5086,8 +5119,8 @@ def IsInitializerList(clean_lines, linenum):
return False
def CheckForNonConstReference(filename, clean_lines, linenum,
nesting_state, error):
def CheckForNonConstReference(filename, clean_lines, linenum, nesting_state,
error):
"""Check for non-const references.
Separate from CheckLanguage since it scans backwards from current
......@@ -5211,8 +5244,8 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
# didn't see any function name on this line, so this is likely a
# multi-line parameter list. Try a bit harder to catch this case.
for i in xrange(2):
if (linenum > i and
Search(whitelisted_functions, clean_lines.elided[linenum - i - 1])):
if (linenum > i and Search(whitelisted_functions,
clean_lines.elided[linenum - i - 1])):
return
decls = ReplaceAll(r'{[^}]*}', ' ', line) # exclude function body
......@@ -5220,8 +5253,8 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
if not Match(_RE_PATTERN_CONST_REF_PARAM, parameter):
error(filename, linenum, 'runtime/references', 2,
'Is this a non-const reference? '
'If so, make const or use a pointer: ' +
ReplaceAll(' *<', '<', parameter))
'If so, make const or use a pointer: ' + ReplaceAll(
' *<', '<', parameter))
def CheckCasts(filename, clean_lines, linenum, error):
......@@ -5239,8 +5272,7 @@ def CheckCasts(filename, clean_lines, linenum, error):
# I just try to capture the most common basic types, though there are more.
# Parameterless conversion functions, such as bool(), are allowed as they are
# probably a member operator declaration or default constructor.
match = Search(
r'(\bnew\s+|\S<\s*(?:const\s+)?)?\b'
match = Search(r'(\bnew\s+|\S<\s*(?:const\s+)?)?\b'
r'(int|float|double|bool|char|int32|uint32|int64|uint64)'
r'(\([^)].*)', line)
expecting_function = ExpectingFunctionArgs(clean_lines, linenum)
......@@ -5272,21 +5304,19 @@ def CheckCasts(filename, clean_lines, linenum, error):
# - Placement new
# - Alias declarations
matched_funcptr = match.group(3)
if (matched_new_or_template is None and
not (matched_funcptr and
(Match(r'\((?:[^() ]+::\s*\*\s*)?[^() ]+\)\s*\(',
matched_funcptr) or
matched_funcptr.startswith('(*)'))) and
if (matched_new_or_template is None and not (matched_funcptr and (Match(
r'\((?:[^() ]+::\s*\*\s*)?[^() ]+\)\s*\(',
matched_funcptr) or matched_funcptr.startswith('(*)'))) and
not Match(r'\s*using\s+\S+\s*=\s*' + matched_type, line) and
not Search(r'new\(\S+\)\s*' + matched_type, line)):
error(filename, linenum, 'readability/casting', 4,
'Using deprecated casting style. '
'Use static_cast<%s>(...) instead' %
matched_type)
'Use static_cast<%s>(...) instead' % matched_type)
if not expecting_function:
CheckCStyleCast(filename, clean_lines, linenum, 'static_cast',
r'\((int|float|double|bool|char|u?int(16|32|64))\)', error)
r'\((int|float|double|bool|char|u?int(16|32|64))\)',
error)
# This doesn't catch all cases. Consider (const char * const)"hello".
#
......@@ -5311,17 +5341,18 @@ def CheckCasts(filename, clean_lines, linenum, error):
#
# This is not a cast:
# reference_type&(int* function_param);
match = Search(
r'(?:[^\w]&\(([^)*][^)]*)\)[\w(])|'
match = Search(r'(?:[^\w]&\(([^)*][^)]*)\)[\w(])|'
r'(?:[^\w]&(static|dynamic|down|reinterpret)_cast\b)', line)
if match:
# Try a better error message when the & is bound to something
# dereferenced by the casted pointer, as opposed to the casted
# pointer itself.
parenthesis_error = False
match = Match(r'^(.*&(?:static|dynamic|down|reinterpret)_cast\b)<', line)
match = Match(r'^(.*&(?:static|dynamic|down|reinterpret)_cast\b)<',
line)
if match:
_, y1, x1 = CloseExpression(clean_lines, linenum, len(match.group(1)))
_, y1, x1 = CloseExpression(clean_lines, linenum,
len(match.group(1)))
if x1 >= 0 and clean_lines.elided[y1][x1] == '(':
_, y2, x2 = CloseExpression(clean_lines, y1, x1)
if x2 >= 0:
......@@ -5470,42 +5501,73 @@ def ExpectingFunctionArgs(clean_lines, linenum):
_HEADERS_CONTAINING_TEMPLATES = (
('<deque>', ('deque',)),
('<functional>', ('unary_function', 'binary_function',
'plus', 'minus', 'multiplies', 'divides', 'modulus',
('<deque>', ('deque', )),
('<functional>', (
'unary_function',
'binary_function',
'plus',
'minus',
'multiplies',
'divides',
'modulus',
'negate',
'equal_to', 'not_equal_to', 'greater', 'less',
'greater_equal', 'less_equal',
'logical_and', 'logical_or', 'logical_not',
'unary_negate', 'not1', 'binary_negate', 'not2',
'bind1st', 'bind2nd',
'equal_to',
'not_equal_to',
'greater',
'less',
'greater_equal',
'less_equal',
'logical_and',
'logical_or',
'logical_not',
'unary_negate',
'not1',
'binary_negate',
'not2',
'bind1st',
'bind2nd',
'pointer_to_unary_function',
'pointer_to_binary_function',
'ptr_fun',
'mem_fun_t', 'mem_fun', 'mem_fun1_t', 'mem_fun1_ref_t',
'mem_fun_t',
'mem_fun',
'mem_fun1_t',
'mem_fun1_ref_t',
'mem_fun_ref_t',
'const_mem_fun_t', 'const_mem_fun1_t',
'const_mem_fun_ref_t', 'const_mem_fun1_ref_t',
'mem_fun_ref',
)),
('<limits>', ('numeric_limits',)),
('<list>', ('list',)),
('<map>', ('map', 'multimap',)),
('<memory>', ('allocator',)),
('<queue>', ('queue', 'priority_queue',)),
('<set>', ('set', 'multiset',)),
('<stack>', ('stack',)),
('<string>', ('char_traits', 'basic_string',)),
('<tuple>', ('tuple',)),
('<utility>', ('pair',)),
('<vector>', ('vector',)),
'const_mem_fun_t',
'const_mem_fun1_t',
'const_mem_fun_ref_t',
'const_mem_fun1_ref_t',
'mem_fun_ref', )),
('<limits>', ('numeric_limits', )),
('<list>', ('list', )),
('<map>', (
'map',
'multimap', )),
('<memory>', ('allocator', )),
('<queue>', (
'queue',
'priority_queue', )),
('<set>', (
'set',
'multiset', )),
('<stack>', ('stack', )),
('<string>', (
'char_traits',
'basic_string', )),
('<tuple>', ('tuple', )),
('<utility>', ('pair', )),
('<vector>', ('vector', )),
# gcc extensions.
# Note: std::hash is their hash, ::hash is our hash
('<hash_map>', ('hash_map', 'hash_multimap',)),
('<hash_set>', ('hash_set', 'hash_multiset',)),
('<slist>', ('slist',)),
)
('<hash_map>', (
'hash_map',
'hash_multimap', )),
('<hash_set>', (
'hash_set',
'hash_multiset', )),
('<slist>', ('slist', )), )
_RE_PATTERN_STRING = re.compile(r'\bstring\b')
......@@ -5515,16 +5577,14 @@ for _template in ('copy', 'max', 'min', 'min_element', 'sort', 'swap',
# Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max or
# type::max().
_re_pattern_algorithm_header.append(
(re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'),
_template,
(re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'), _template,
'<algorithm>'))
_re_pattern_templates = []
for _header, _templates in _HEADERS_CONTAINING_TEMPLATES:
for _template in _templates:
_re_pattern_templates.append(
(re.compile(r'(\<|\b)' + _template + r'\s*\<'),
_template + '<>',
(re.compile(r'(\<|\b)' + _template + r'\s*\<'), _template + '<>',
_header))
......@@ -5610,7 +5670,10 @@ def UpdateIncludeState(filename, include_dict, io=codecs):
return True
def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
def CheckForIncludeWhatYouUse(filename,
clean_lines,
include_state,
error,
io=codecs):
"""Reports for missing stl includes.
......@@ -5660,8 +5723,8 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
# The policy is that if you #include something in foo.h you don't need to
# include it again in foo.cc. Here, we will look at possible includes.
# Let's flatten the include_state include_list and copy it into a dictionary.
include_dict = dict([item for sublist in include_state.include_list
for item in sublist])
include_dict = dict(
[item for sublist in include_state.include_list for item in sublist])
# Did we find the header for this file (if any) and successfully load it?
header_found = False
......@@ -5682,7 +5745,8 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
# the keys.
header_keys = include_dict.keys()
for header in header_keys:
(same_module, common_path) = FilesBelongToSameModule(abs_filename, header)
(same_module, common_path) = FilesBelongToSameModule(abs_filename,
header)
fullpath = common_path + header
if same_module and UpdateIncludeState(fullpath, include_dict, io):
header_found = True
......@@ -5700,8 +5764,8 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
template = required[required_header_unstripped][1]
if required_header_unstripped.strip('<>"') not in include_dict:
error(filename, required[required_header_unstripped][0],
'build/include_what_you_use', 4,
'Add #include ' + required_header_unstripped + ' for ' + template)
'build/include_what_you_use', 4, 'Add #include ' +
required_header_unstripped + ' for ' + template)
_RE_PATTERN_EXPLICIT_MAKEPAIR = re.compile(r'\bmake_pair\s*<')
......@@ -5722,7 +5786,10 @@ def CheckMakePairUsesDeduction(filename, clean_lines, linenum, error):
line = clean_lines.elided[linenum]
match = _RE_PATTERN_EXPLICIT_MAKEPAIR.search(line)
if match:
error(filename, linenum, 'build/explicit_make_pair',
error(
filename,
linenum,
'build/explicit_make_pair',
4, # 4 = high confidence
'For C++11-compatibility, omit template arguments from make_pair'
' OR use pair directly OR if appropriate, construct a pair directly')
......@@ -5746,9 +5813,13 @@ def CheckDefaultLambdaCaptures(filename, clean_lines, linenum, error):
# Found a potential error, check what comes after the lambda-introducer.
# If it's not open parenthesis (for lambda-declarator) or open brace
# (for compound-statement), it's not a lambda.
line, _, pos = CloseExpression(clean_lines, linenum, len(match.group(1)))
line, _, pos = CloseExpression(clean_lines, linenum,
len(match.group(1)))
if pos >= 0 and Match(r'^\s*[{(]', line[pos:]):
error(filename, linenum, 'build/c++11',
error(
filename,
linenum,
'build/c++11',
4, # 4 = high confidence
'Default lambda captures are an unapproved C++ feature.')
......@@ -5793,7 +5864,8 @@ def CheckRedundantVirtual(filename, clean_lines, linenum, error):
if parameter_list:
# Match parentheses to find the end of the parameter list
(_, end_line, end_col) = CloseExpression(
clean_lines, start_line, start_col + len(parameter_list.group(1)))
clean_lines, start_line,
start_col + len(parameter_list.group(1)))
break
start_col = 0
......@@ -5846,8 +5918,6 @@ def CheckRedundantOverrideOrFinal(filename, clean_lines, linenum, error):
'already declared as "final"'))
# Returns true if we are at a new block, and it is directly
# inside of a namespace.
def IsBlockInNameSpace(nesting_state, is_forward_declaration):
......@@ -5912,8 +5982,14 @@ def CheckItemIndentationInNamespace(filename, raw_lines_no_comments, linenum,
'Do not indent within a namespace')
def ProcessLine(filename, file_extension, clean_lines, line,
include_state, function_state, nesting_state, error,
def ProcessLine(filename,
file_extension,
clean_lines,
line,
include_state,
function_state,
nesting_state,
error,
extra_check_functions=[]):
"""Processes a single line in the file.
......@@ -5941,12 +6017,13 @@ def ProcessLine(filename, file_extension, clean_lines, line,
if nesting_state.InAsmBlock(): return
CheckForFunctionLengths(filename, clean_lines, line, function_state, error)
CheckForMultilineCommentsAndStrings(filename, clean_lines, line, error)
CheckStyle(filename, clean_lines, line, file_extension, nesting_state, error)
CheckStyle(filename, clean_lines, line, file_extension, nesting_state,
error)
CheckLanguage(filename, clean_lines, line, file_extension, include_state,
nesting_state, error)
CheckForNonConstReference(filename, clean_lines, line, nesting_state, error)
CheckForNonStandardConstructs(filename, clean_lines, line,
nesting_state, error)
CheckForNonStandardConstructs(filename, clean_lines, line, nesting_state,
error)
CheckVlogArguments(filename, clean_lines, line, error)
CheckPosixThreading(filename, clean_lines, line, error)
CheckInvalidIncrement(filename, clean_lines, line, error)
......@@ -5957,6 +6034,7 @@ def ProcessLine(filename, file_extension, clean_lines, line,
for check_fn in extra_check_functions:
check_fn(filename, clean_lines, line, error)
def FlagCxx11Features(filename, clean_lines, linenum, error):
"""Flag those c++11 features that we only allow in certain places.
......@@ -5970,7 +6048,8 @@ def FlagCxx11Features(filename, clean_lines, linenum, error):
# Flag unapproved C++11 headers.
include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line)
if include and include.group(1) in ('cfenv',
if include and include.group(1) in (
'cfenv',
'condition_variable',
'fenv.h',
'future',
......@@ -5979,8 +6058,7 @@ def FlagCxx11Features(filename, clean_lines, linenum, error):
'chrono',
'ratio',
'regex',
'system_error',
):
'system_error', ):
error(filename, linenum, 'build/c++11', 5,
('<%s> is an unapproved C++11 header.') % include.group(1))
......@@ -5994,16 +6072,18 @@ def FlagCxx11Features(filename, clean_lines, linenum, error):
for top_name in (
# type_traits
'alignment_of',
'aligned_union',
):
'aligned_union', ):
if Search(r'\bstd::%s\b' % top_name, line):
error(filename, linenum, 'build/c++11', 5,
('std::%s is an unapproved C++11 class or function. Send c-style '
error(filename, linenum, 'build/c++11', 5, (
'std::%s is an unapproved C++11 class or function. Send c-style '
'an example of where it would make your code more readable, and '
'they may let you use it.') % top_name)
def ProcessFileData(filename, file_extension, lines, error,
def ProcessFileData(filename,
file_extension,
lines,
error,
extra_check_functions=[]):
"""Performs lint checks and reports any errors to the given error function.
......@@ -6036,9 +6116,8 @@ def ProcessFileData(filename, file_extension, lines, error,
CheckForHeaderGuard(filename, clean_lines, error)
for line in xrange(clean_lines.NumLines()):
ProcessLine(filename, file_extension, clean_lines, line,
include_state, function_state, nesting_state, error,
extra_check_functions)
ProcessLine(filename, file_extension, clean_lines, line, include_state,
function_state, nesting_state, error, extra_check_functions)
FlagCxx11Features(filename, clean_lines, line, error)
nesting_state.CheckCompletedBlocks(filename, error)
......@@ -6054,6 +6133,7 @@ def ProcessFileData(filename, file_extension, lines, error,
CheckForNewlineAtEOF(filename, lines, error)
def ProcessConfigOverrides(filename):
""" Loads the configuration files and processes the config overrides.
......@@ -6101,7 +6181,8 @@ def ProcessConfigOverrides(filename):
if base_name:
pattern = re.compile(val)
if pattern.match(base_name):
sys.stderr.write('Ignoring "%s": file excluded by "%s". '
sys.stderr.write(
'Ignoring "%s": file excluded by "%s". '
'File path component "%s" matches '
'pattern "%s"\n' %
(filename, cfg_file, base_name, val))
......@@ -6119,7 +6200,8 @@ def ProcessConfigOverrides(filename):
except IOError:
sys.stderr.write(
"Skipping config file '%s': Can't open for reading\n" % cfg_file)
"Skipping config file '%s': Can't open for reading\n" %
cfg_file)
keep_looking = False
# Apply all the accumulated filters in reverse order (top-level directory
......@@ -6167,7 +6249,8 @@ def ProcessFile(filename, vlevel, extra_check_functions=[]):
codecs.getwriter('utf8'),
'replace').read().split('\n')
else:
lines = codecs.open(filename, 'r', 'utf8', 'replace').read().split('\n')
lines = codecs.open(filename, 'r', 'utf8',
'replace').read().split('\n')
# Remove trailing '\r'.
# The -1 accounts for the extra trailing blank line we get from split()
......@@ -6179,8 +6262,8 @@ def ProcessFile(filename, vlevel, extra_check_functions=[]):
lf_lines.append(linenum + 1)
except IOError:
sys.stderr.write(
"Skipping input '%s': Can't open for reading\n" % filename)
sys.stderr.write("Skipping input '%s': Can't open for reading\n" %
filename)
_RestoreFilters()
return
......@@ -6252,12 +6335,10 @@ def ParseArguments(args):
The list of filenames to lint.
"""
try:
(opts, filenames) = getopt.getopt(args, '', ['help', 'output=', 'verbose=',
'counting=',
'filter=',
'root=',
'linelength=',
'extensions='])
(opts, filenames) = getopt.getopt(args, '', [
'help', 'output=', 'verbose=', 'counting=', 'filter=', 'root=',
'linelength=', 'extensions='
])
except getopt.GetoptError:
PrintUsage('Invalid arguments.')
......@@ -6271,7 +6352,9 @@ def ParseArguments(args):
PrintUsage(None)
elif opt == '--output':
if val not in ('emacs', 'vs7', 'eclipse'):
PrintUsage('The only allowed output formats are emacs, vs7 and eclipse.')
PrintUsage(
'The only allowed output formats are emacs, vs7 and eclipse.'
)
output_format = val
elif opt == '--verbose':
verbosity = int(val)
......@@ -6281,7 +6364,8 @@ def ParseArguments(args):
PrintCategories()
elif opt == '--counting':
if val not in ('total', 'toplevel', 'detailed'):
PrintUsage('Valid counting options are total, toplevel, and detailed')
PrintUsage(
'Valid counting options are total, toplevel, and detailed')
counting_style = val
elif opt == '--root':
global _root
......@@ -6317,8 +6401,7 @@ def main():
# if we try to print something containing non-ASCII characters.
sys.stderr = codecs.StreamReaderWriter(sys.stderr,
codecs.getreader('utf8'),
codecs.getwriter('utf8'),
'replace')
codecs.getwriter('utf8'), 'replace')
_cpplint_state.ResetErrorCounts()
for filename in filenames:
......
......@@ -33,5 +33,3 @@ cmake .. -DWITH_GPU=ON -DWITH_SWIG_PY=ON -DWITH_AVX=OFF -DCUDNN_ROOT=/usr/
make -j `nproc`
cpack -D CPACK_GENERATOR='DEB' ..
mv *.deb ~/dist/gpu-noavx
......@@ -58,4 +58,3 @@ m4 -DPADDLE_WITH_GPU=ON -DPADDLE_IS_DEVEL=ON -DPADDLE_WITH_DEMO=ON \
-DPADDLE_BASE_IMAGE=nvidia/cuda:7.5-cudnn5-devel-ubuntu14.04 \
-DPADDLE_WITH_AVX=OFF \
Dockerfile.m4 > Dockerfile.gpu-noavx-demo
......@@ -2,4 +2,3 @@
set -e
mkdir -p ../../../build
cd ../../../build
......@@ -998,4 +998,3 @@ from IN B-PP
Friday NNP B-NP
's POS B-NP
Tokyo NNP I-NP
......@@ -4998,4 +4998,3 @@ However RB B-ADVP
the DT B-NP
disclosure NN I-NP
of IN B-PP
......@@ -109,4 +109,3 @@ int main(int argc, char** argv) {
}
#endif
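
The small one-line hunks above (shell scripts, Dockerfile templates, data files, C++ sources) appear to be trailing-blank-line cleanups at end of file rather than content changes. A minimal sketch of that kind of cleanup is shown below; it is not part of this commit, the file paths are purely illustrative, and it simply guarantees each file ends with exactly one newline.

```python
# Minimal sketch (not part of this commit): strip trailing blank lines and
# ensure a single final newline, the kind of fix reflected in the hunks above.
import sys


def fix_trailing_newlines(path):
    with open(path, 'rb') as f:
        content = f.read()
    # Drop trailing CR/LF runs, then re-append exactly one newline
    # (empty files are left empty).
    fixed = content.rstrip(b'\r\n') + b'\n' if content.strip() else b''
    if fixed != content:
        with open(path, 'wb') as f:
            f.write(fixed)
        return True
    return False


if __name__ == '__main__':
    changed = [p for p in sys.argv[1:] if fix_trailing_newlines(p)]
    print('fixed: %s' % ', '.join(changed))
```
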
......@@ -410,8 +410,8 @@ def RecurrentLayerGroupEnd(name):
"RecurrentLayerGroup not begin")
for pair in g_current_submodel.memories: #check exist
layer = g_layer_map[pair.layer_name]
config_assert(layer is not None, "memory declare wrong name:%s" %
pair.layer_name)
config_assert(layer is not None,
"memory declare wrong name:%s" % pair.layer_name)
memory_link = g_layer_map[pair.link_name]
config_assert(layer.size == memory_link.size,
"memory declare wrong size:%d" % memory_link.size)
......@@ -686,8 +686,8 @@ class ConvProjection(Projection):
parse_conv(conv_conf, input_layer_name, self.proj_conf.conv_conf,
num_filters)
# TODO: support rectangle input
self.proj_conf.output_size = (self.proj_conf.conv_conf.output_x**
2) * num_filters
self.proj_conf.output_size = (self.proj_conf.conv_conf.output_x
**2) * num_filters
def calc_output_size(self, input_layer_config):
return self.proj_conf.output_size
......@@ -2793,8 +2793,8 @@ class ConcatenateLayer2(LayerBase):
@config_layer('recurrent')
class RecurrentLayer(LayerBase):
def __init__(self, name, inputs, reversed=False, bias=True, **xargs):
super(RecurrentLayer, self).__init__(name, 'recurrent', 0, inputs, **
xargs)
super(RecurrentLayer, self).__init__(name, 'recurrent', 0, inputs,
**xargs)
config_assert(len(self.inputs) == 1, 'RecurrentLayer must have 1 input')
input_layer = self.get_input_layer(0)
size = input_layer.size
......@@ -2876,22 +2876,22 @@ class MDLstmLayer(LayerBase):
active_state_type="sigmoid",
bias=True,
**xargs):
super(MDLstmLayer, self).__init__(name, 'mdlstmemory', 0, inputs, **
xargs)
super(MDLstmLayer, self).__init__(name, 'mdlstmemory', 0, inputs,
**xargs)
config_assert(len(self.inputs) == 1, 'MDLstmLayer must have 1 input')
input_layer = self.get_input_layer(0)
dim_num = len(directions)
#check input_layer.size is divided by (3+dim_num)
config_assert(input_layer.size %
(3 + dim_num) == 0, "size % (dim_num) should be 0!")
config_assert(input_layer.size % (3 + dim_num) == 0,
"size % (dim_num) should be 0!")
size = input_layer.size / (3 + dim_num)
self.set_layer_size(size)
self.config.active_gate_type = active_gate_type
self.config.active_state_type = active_state_type
for i in xrange(len(directions)):
self.config.directions.append(int(directions[i]))
self.create_input_parameter(0, size * size *
(3 + dim_num), [size, size, 3 + dim_num])
self.create_input_parameter(0, size * size * (3 + dim_num),
[size, size, 3 + dim_num])
#bias includes 3 kinds of peephole, 3+dim_num+2+dim_num
self.create_bias_parameter(bias, size * (5 + 2 * dim_num))
......@@ -2929,8 +2929,8 @@ class GruStepLayer(LayerBase):
active_gate_type="sigmoid",
bias=True,
**xargs):
super(GruStepLayer, self).__init__(name, 'gru_step', size, inputs, **
xargs)
super(GruStepLayer, self).__init__(name, 'gru_step', size, inputs,
**xargs)
config_assert(len(self.inputs) == 2, 'GruStepLayer must have 2 input')
input_layer0 = self.get_input_layer(0)
input_layer1 = self.get_input_layer(1)
......
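
The Python hunks above are mechanical re-wrappings by a formatter, not behavioral changes: long definition and call lines are split so continuation arguments fit the line-length limit. A minimal sketch of driving such a reformat programmatically is shown below; it is not part of this commit, assumes the `yapf` package is installed, and uses only its public `FormatCode` entry point (the return type varies across yapf releases, so the result is normalized defensively).

```python
# Minimal sketch (not part of this commit): reformat a long definition line
# the way the hunks above were re-wrapped. 'pep8' is a built-in yapf style;
# FormatCode may return a string or a (code, changed) tuple depending on the
# installed yapf version.
from yapf.yapflib import yapf_api

SOURCE = (
    "def CheckLanguage(filename, clean_lines, linenum, file_extension, "
    "include_state, nesting_state, error):\n"
    "    pass\n")

result = yapf_api.FormatCode(SOURCE, style_config='pep8')
formatted = result[0] if isinstance(result, tuple) else result
print(formatted)
```
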