3 """The Tab Nanny despises ambiguous indentation. She knows no mercy."""

import os
import sys
import getopt
import tokenize

if not hasattr(tokenize, 'NL'):
    raise ValueError("tokenize.NL doesn't exist -- tokenize module too old")

__all__ = ["check", "NannyNag", "process_tokens"]

verbose = 0
filename_only = 0

def errprint(*args):
    sep = ""
    for arg in args:
        sys.stderr.write(sep + str(arg))
        sep = " "
    sys.stderr.write("\n")
def main():
    global verbose, filename_only
    try:
        opts, args = getopt.getopt(sys.argv[1:], "qv")
    except getopt.error, msg:
        errprint(msg); return
    for o, a in opts:
        if o == '-q': filename_only = filename_only + 1
        if o == '-v': verbose = verbose + 1
    if not args:
        errprint("Usage:", sys.argv[0], "[-v] file_or_directory ..."); return
    for arg in args: check(arg)

class NannyNag(Exception):
    def __init__(self, lineno, msg, line):
        self.lineno, self.msg, self.line = lineno, msg, line
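    # check() below calls nag.get_lineno(), nag.get_msg() and
    # nag.get_line(); minimal accessors to match (reconstructed):
    def get_lineno(self): return self.lineno
    def get_msg(self): return self.msg
    def get_line(self): return self.line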

def check(file):
    if os.path.isdir(file) and not os.path.islink(file):
        if verbose:
            print "%s: listing directory" % `file`
        names = os.listdir(file)
        for name in names:
            fullname = os.path.join(file, name)
            # "and" binds tighter than "or": recurse into real
            # subdirectories, and check anything named *.py
            if (os.path.isdir(fullname) and
                not os.path.islink(fullname) or
                os.path.normcase(name[-3:]) == ".py"):
                check(fullname)
        return
80 print "checking", `file`,
"..."
    try:
        process_tokens(tokenize.generate_tokens(f.readline))
    except tokenize.TokenError, msg:
        errprint("%s: Token Error: %s" % (`file`, str(msg)))
        return
    except NannyNag, nag:
        badline = nag.get_lineno()
        line = nag.get_line()
        if verbose:
            print "%s: *** Line %d: trouble in tab city! ***" % (`file`, badline)
            print "offending line:", `line`
        else:
            if ' ' in file: file = '"' + file + '"'
            if filename_only: print file
            else: print file, badline, `line`
        return
104 print "%s: Clean bill of health." % `file`
class Whitespace:
    def __init__(self, ws):
        S, T = Whitespace.S, Whitespace.T
                if b >= len(count):
                    count = count + [0] * (b - len(count) + 1)
                count[b] = count[b] + 1
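                # count[i] is the number of tabs immediately preceded by
                # exactly i spaces; b holds the length of the current run
                # of spaces, so longer runs grow the count table.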

    def longest_run_of_spaces(self):
        count, trailing = self.norm
        return max(len(count)-1, trailing)

    def indent_level(self, tabsize):
        count, trailing = self.norm
        il = 0
        for i in range(tabsize, len(count)):
            il = il + i/tabsize * count[i]
        return trailing + tabsize * (il + self.nt)
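    # For example, for the prefix "\t  \t", indent_level(4) == 8
    # (tab -> col 4, two spaces -> col 6, tab -> col 8) but
    # indent_level(2) == 6: the same whitespace lands on different
    # columns at different tab sizes, which is exactly what the
    # witness methods below probe for.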

    def equal(self, other):
        return self.norm == other.norm
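    # self.norm is constructed so that two prefixes have the same indent
    # level at every tab size exactly when their norms match, so a plain
    # tuple comparison suffices here.
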
    def not_equal_witness(self, other):
        n = max(self.longest_run_of_spaces(),
                other.longest_run_of_spaces()) + 1
        a = []
        for ts in range(1, n+1):
            if self.indent_level(ts) != other.indent_level(ts):
                a.append((ts, self.indent_level(ts), other.indent_level(ts)))
        return a

    def less(self, other):
        if self.n >= other.n:
            return 0
        if self.is_simple and other.is_simple:
            return self.nt <= other.nt
        n = max(self.longest_run_of_spaces(),
                other.longest_run_of_spaces()) + 1
        for ts in range(2, n+1):
            if self.indent_level(ts) >= other.indent_level(ts):
                return 0
        return 1

    def not_less_witness(self, other):
        n = max(self.longest_run_of_spaces(),
                other.longest_run_of_spaces()) + 1
        a = []
        for ts in range(1, n+1):
            if self.indent_level(ts) >= other.indent_level(ts):
                a.append((ts, self.indent_level(ts), other.indent_level(ts)))
        return a

def format_witnesses(w):
    firsts = map(lambda tup: str(tup[0]), w)
    prefix = "at tab size"
    if len(w) > 1:
        prefix = prefix + "s"
    return prefix + " " + ', '.join(firsts)

def process_tokens(tokens):
    INDENT = tokenize.INDENT
    DEDENT = tokenize.DEDENT
    NEWLINE = tokenize.NEWLINE
    JUNK = tokenize.COMMENT, tokenize.NL
    indents = [Whitespace("")]
    check_equal = 0
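
    # COMMENT and NL tokens may carry arbitrary leading whitespace without
    # affecting program structure, so they are exempt from the
    # equal-indentation check in the loop below.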
    for (type, token, start, end, line) in tokens:
        if type == NEWLINE:
            check_equal = 1
        elif type == INDENT:
            check_equal = 0; thisguy = Whitespace(token)
            if not indents[-1].less(thisguy):
                witness = indents[-1].not_less_witness(thisguy)
                msg = "indent not greater e.g. " + format_witnesses(witness)
                raise NannyNag(start[0], msg, line)
            indents.append(thisguy)
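            # indents is a stack of Whitespace objects, one per nesting
            # level; each INDENT must be strictly deeper than the level it
            # opens under at every tab size, else a witness is reported.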
        elif type == DEDENT:
            check_equal = 1
            del indents[-1]
        elif check_equal and type not in JUNK:
            check_equal = 0
            thisguy = Whitespace(line)
            if not indents[-1].equal(thisguy):
                witness = indents[-1].not_equal_witness(thisguy)
                msg = "indent not equal e.g. " + format_witnesses(witness)
                raise NannyNag(start[0], msg, line)

if __name__ == '__main__':
    main()
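
# Programmatic use, a minimal sketch (the file name is hypothetical):
#
#     import tabnanny
#     tabnanny.check("suspect.py")
#
# check() prints its findings rather than raising: NannyNag is caught
# inside check() and formatted as shown above.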