Vega Strike Python Modules doc 0.5.1
Documentation of the "Modules" folder of Vega Strike
sre.py
1 #
2 # Secret Labs' Regular Expression Engine
3 #
4 # re-compatible interface for the sre matching engine
5 #
6 # Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
7 #
8 # This version of the SRE library can be redistributed under CNRI's
9 # Python 1.6 license. For any other use, please contact Secret Labs
10 # AB (info@pythonware.com).
11 #
12 # Portions of this engine have been developed in cooperation with
13 # CNRI. Hewlett-Packard provided funding for 1.6 integration and
14 # other compatibility work.
15 #
16 
17 r"""Support for regular expressions (RE).
18 
19 This module provides regular expression matching operations similar to
20 those found in Perl. It supports both 8-bit and Unicode strings; both
21 the pattern and the strings being processed can contain null bytes and
22 characters outside the US ASCII range.
23 
24 Regular expressions can contain both special and ordinary characters.
25 Most ordinary characters, like "A", "a", or "0", are the simplest
26 regular expressions; they simply match themselves. You can
27 concatenate ordinary characters, so last matches the string 'last'.
28 
29 The special characters are:
30  "." Matches any character except a newline.
31  "^" Matches the start of the string.
32  "$" Matches the end of the string.
33  "*" Matches 0 or more (greedy) repetitions of the preceding RE.
34  Greedy means that it will match as many repetitions as possible.
35  "+" Matches 1 or more (greedy) repetitions of the preceding RE.
36  "?" Matches 0 or 1 (greedy) of the preceding RE.
37  *?,+?,?? Non-greedy versions of the previous three special characters.
38  {m,n} Matches from m to n repetitions of the preceding RE.
39  {m,n}? Non-greedy version of the above.
40  "\\" Either escapes special characters or signals a special sequence.
41  [] Indicates a set of characters.
42  A "^" as the first character indicates a complementing set.
43  "|" A|B, creates an RE that will match either A or B.
44  (...) Matches the RE inside the parentheses.
45  The contents can be retrieved or matched later in the string.
46  (?iLmsux) Set the I, L, M, S, U, or X flag for the RE (see below).
47  (?:...) Non-grouping version of regular parentheses.
48  (?P<name>...) The substring matched by the group is accessible by name.
49  (?P=name) Matches the text matched earlier by the group named name.
50  (?#...) A comment; ignored.
51  (?=...) Matches if ... matches next, but doesn't consume the string.
52  (?!...) Matches if ... doesn't match next.
53 
54 The special sequences consist of "\\" and a character from the list
55 below. If the ordinary character is not on the list, then the
56 resulting RE will match the second character.
57  \number Matches the contents of the group of the same number.
58  \A Matches only at the start of the string.
59  \Z Matches only at the end of the string.
60  \b Matches the empty string, but only at the start or end of a word.
61  \B Matches the empty string, but not at the start or end of a word.
62  \d Matches any decimal digit; equivalent to the set [0-9].
63  \D Matches any non-digit character; equivalent to the set [^0-9].
64  \s Matches any whitespace character; equivalent to [ \t\n\r\f\v].
65  \S Matches any non-whitespace character; equiv. to [^ \t\n\r\f\v].
66  \w Matches any alphanumeric character; equivalent to [a-zA-Z0-9_].
67  With LOCALE, it will match the set [0-9_] plus characters defined
68  as letters for the current locale.
69  \W Matches the complement of \w.
70  \\ Matches a literal backslash.
71 
72 This module exports the following functions:
73  match Match a regular expression pattern to the beginning of a string.
74  search Search a string for the presence of a pattern.
75  sub Substitute occurrences of a pattern found in a string.
76  subn Same as sub, but also return the number of substitutions made.
77  split Split a string by the occurrences of a pattern.
78  findall Find all occurrences of a pattern in a string.
79  compile Compile a pattern into a RegexObject.
80  purge Clear the regular expression cache.
81  escape Backslash all non-alphanumerics in a string.
82 
83 Some of the functions in this module take flags as optional parameters:
84  I IGNORECASE Perform case-insensitive matching.
85  L LOCALE Make \w, \W, \b, \B, dependent on the current locale.
86  M MULTILINE "^" matches the beginning of lines as well as the string.
87  "$" matches the end of lines as well as the string.
88  S DOTALL "." matches any character at all, including the newline.
89  X VERBOSE Ignore whitespace and comments for nicer looking RE's.
90  U UNICODE Make \w, \W, \b, \B, dependent on the Unicode locale.
91 
92 This module also defines an exception 'error'.
93 
94 """
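The short examples interspersed below are editorial sketches, not part of sre.py; they assume this file is importable as sre (as shipped in the Modules folder) and illustrate a few of the constructs listed in the docstring above. Sample patterns and strings are invented for illustration.

  import sre

  m = sre.match(r"(?P<word>\w+)\s+(\d+)", "item 42")   # illustrative pattern, not from the file
  m.group("word")                          # 'item'  -- named group (?P<name>...)
  m.group(2)                               # '42'    -- \d matches a decimal digit
  sre.match(r"colou?r", "color").group()   # 'color' -- "?" matches 0 or 1 of the preceding RE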
95 
96 import sys
97 import sre_compile
98 import sre_parse
99 
100 # public symbols
101 __all__ = [ "match", "search", "sub", "subn", "split", "findall",
102  "compile", "purge", "template", "escape", "I", "L", "M", "S", "X",
103  "U", "IGNORECASE", "LOCALE", "MULTILINE", "DOTALL", "VERBOSE",
104  "UNICODE", "error" ]
105 
106 __version__ = "2.2.1"
107 
108 # this module works under 1.5.2 and later. don't use string methods
109 import string
110 
111 # flags
112 I = IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE # ignore case
113 L = LOCALE = sre_compile.SRE_FLAG_LOCALE # assume current 8-bit locale
114 U = UNICODE = sre_compile.SRE_FLAG_UNICODE # assume unicode locale
115 M = MULTILINE = sre_compile.SRE_FLAG_MULTILINE # make anchors look for newline
116 S = DOTALL = sre_compile.SRE_FLAG_DOTALL # make dot match newline
117 X = VERBOSE = sre_compile.SRE_FLAG_VERBOSE # ignore whitespace and comments
118 
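A quick sketch of combining the flags defined above by OR-ing them together (illustrative strings, not from the file):

  import sre

  sre.search(r"^world", "Hello\nWORLD", sre.I | sre.M).group()   # 'WORLD': MULTILINE anchors, IGNORECASE match
  sre.match(r"hello.world", "hello\nworld", sre.S) is not None   # True: DOTALL lets "." cross the newline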
119 # sre extensions (experimental, don't rely on these)
120 T = TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE # disable backtracking
121 DEBUG = sre_compile.SRE_FLAG_DEBUG # dump pattern after compilation
122 
123 # sre exception
124 error = sre_compile.error
125 
126 # --------------------------------------------------------------------
127 # public interface
128 
129 def match(pattern, string, flags=0):
130  """Try to apply the pattern at the start of the string, returning
131  a match object, or None if no match was found."""
132  return _compile(pattern, flags).match(string)
133 
134 def search(pattern, string, flags=0):
135  """Scan through string looking for a match to the pattern, returning
136  a match object, or None if no match was found."""
137  return _compile(pattern, flags).search(string)
138 
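Illustrative sketch (not part of sre.py) of the difference between match() and search():

  import sre

  sre.match("ship", "flag ship")            # None: match() only tries the start of the string
  sre.search("ship", "flag ship").span()    # (5, 9): search() scans the whole string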
139 def sub(pattern, repl, string, count=0):
140  """Return the string obtained by replacing the leftmost
141  non-overlapping occurrences of the pattern in string by the
142  replacement repl"""
143  return _compile(pattern, 0).sub(repl, string, count)
144 
145 def subn(pattern, repl, string, count=0):
146  """Return a 2-tuple containing (new_string, number).
147  new_string is the string obtained by replacing the leftmost
148  non-overlapping occurrences of the pattern in the source
149  string by the replacement repl. number is the number of
150  substitutions that were made."""
151  return _compile(pattern, 0).subn(repl, string, count)
152 
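Illustrative sketch (not part of sre.py) of sub() and subn() with invented sample strings:

  import sre

  sre.sub(r"\s+", "-", "a  b   c")      # 'a-b-c'
  sre.subn(r"\s+", "-", "a  b   c")     # ('a-b-c', 2): also reports how many substitutions were made
  sre.sub(r"\d", "#", "a1b2c3", 2)      # 'a#b#c3': count limits the number of substitutions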
153 def split(pattern, string, maxsplit=0):
154  """Split the source string by the occurrences of the pattern,
155  returning a list containing the resulting substrings."""
156  return _compile(pattern, 0).split(string, maxsplit)
157 
158 def findall(pattern, string):
159  """Return a list of all non-overlapping matches in the string.
160 
161  If one or more groups are present in the pattern, return a
162  list of groups; this will be a list of tuples if the pattern
163  has more than one group.
164 
165  Empty matches are included in the result."""
166  return _compile(pattern, 0).findall(string)
167 
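Illustrative sketch (not part of sre.py) of split() and findall(), including the list-of-tuples result described in the findall docstring:

  import sre

  sre.split(r",\s*", "alpha, beta,gamma")    # ['alpha', 'beta', 'gamma']
  sre.findall(r"\d+", "10 ships, 3 bases")   # ['10', '3']
  sre.findall(r"(\w+)=(\w+)", "x=1 y=2")     # [('x', '1'), ('y', '2')]: tuples when the pattern has >1 group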
168 if sys.hexversion >= 0x02020000:
169  def finditer(pattern, string):
170  """Return an iterator over all non-overlapping matches in the
171  string. For each match, the iterator returns a match object.
172 
173  Empty matches are included in the result."""
174  return _compile(pattern, 0).finditer(string)
175 
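Illustrative sketch (not part of sre.py) of finditer(); as in the source, it is guarded by the Python 2.2 version check:

  import sys, sre

  if sys.hexversion >= 0x02020000:          # finditer() only exists on 2.2 and later
      spans = [m.span() for m in sre.finditer(r"\d+", "10 ships, 3 bases")]
      # spans == [(0, 2), (10, 11)]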
176 def compile(pattern, flags=0):
177  "Compile a regular expression pattern, returning a pattern object."
178  return _compile(pattern, flags)
179 
180 def purge():
181  "Clear the regular expression cache"
182  _cache.clear()
183  _cache_repl.clear()
184 
185 def template(pattern, flags=0):
186  "Compile a template pattern, returning a pattern object"
187  return _compile(pattern, flags|T)
188 
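A sketch (not part of sre.py) of precompiling a pattern and of purge(); the name name_re and the sample text are invented. template() sets the experimental T flag and is not shown here.

  import sre

  name_re = sre.compile(r"(?P<first>\w+)\s+(?P<last>\w+)", sre.I)
  name_re.match("Ada Lovelace").group("last")    # 'Lovelace'
  sre.purge()    # empties _cache and _cache_repl; already-compiled objects such as name_re keep working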
189 def escape(pattern):
190  "Escape all non-alphanumeric characters in pattern."
191  s = list(pattern)
192  for i in range(len(pattern)):
193  c = pattern[i]
194  if not ("a" <= c <= "z" or "A" <= c <= "Z" or "0" <= c <= "9"):
195  if c == "\000":
196  s[i] = "\\000"
197  else:
198  s[i] = "\\" + c
199  return _join(s, pattern)
200 
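Illustrative sketch (not part of sre.py) of escape(), useful when a literal string may contain metacharacters:

  import sre

  sre.escape("vegastrike-0.5.1")    # result: vegastrike\-0\.5\.1 (every non-alphanumeric gets a backslash)
  version = "1.5.2"
  sre.search(sre.escape(version), "works under 1.5.2 and later") is not None   # True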
201 # --------------------------------------------------------------------
202 # internals
203 
204 _cache = {}
205 _cache_repl = {}
206 
207 _pattern_type = type(sre_compile.compile("", 0))
208 
209 _MAXCACHE = 100
210 
211 def _join(seq, sep):
212  # internal: join into string having the same type as sep
213  return string.join(seq, sep[:0])
214 
215 def _compile(*key):
216  # internal: compile pattern
217  p = _cache.get(key)
218  if p is not None:
219  return p
220  pattern, flags = key
221  if type(pattern) is _pattern_type:
222  return pattern
223  if type(pattern) not in sre_compile.STRING_TYPES:
224  raise TypeError, "first argument must be string or compiled pattern"
225  try:
226  p = sre_compile.compile(pattern, flags)
227  except error, v:
228  raise error, v # invalid expression
229  if len(_cache) >= _MAXCACHE:
230  _cache.clear()
231  _cache[key] = p
232  return p
233 
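A small sketch of the caching behaviour implemented above; _compile() is internal (leading underscore) and is called here only to make the cache visible:

  import sre

  p1 = sre._compile(r"\w+", 0)      # first call compiles and stores the result in _cache
  p2 = sre._compile(r"\w+", 0)
  p1 is p2                          # True: the second call is served from _cache, keyed on (pattern, flags)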
234 def _compile_repl(*key):
235  # internal: compile replacement pattern
236  p = _cache_repl.get(key)
237  if p is not None:
238  return p
239  repl, pattern = key
240  try:
241  p = sre_parse.parse_template(repl, pattern)
242  except error, v:
243  raise error, v # invalid expression
244  if len(_cache_repl) >= _MAXCACHE:
245  _cache_repl.clear()
246  _cache_repl[key] = p
247  return p
248 
249 def _expand(pattern, match, template):
250  # internal: match.expand implementation hook
251  template = sre_parse.parse_template(template, pattern)
252  return sre_parse.expand_template(template, match)
253 
254 def _subx(pattern, template):
255  # internal: pattern.sub/subn implementation helper
256  template = _compile_repl(template, pattern)
257  if not template[0] and len(template[1]) == 1:
258  # literal replacement
259  return template[1][0]
260  def filter(match, template=template):
261  return sre_parse.expand_template(template, match)
262  return filter
263 
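A sketch (not part of sre.py) of how replacement templates behave; group references go through _compile_repl/_subx, while a plain replacement takes the literal fast path in _subx:

  import sre

  sre.sub(r"(\w+) (\w+)", r"\2 \1", "privateer vegastrike")   # 'vegastrike privateer'
  sre.sub(r"\d+", "N", "sector 42")                           # 'sector N' (literal replacement, no group refs)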
264 # register myself for pickling
265 
266 import copy_reg
267 
268 def _pickle(p):
269  return _compile, (p.pattern, p.flags)
270 
271 copy_reg.pickle(_pattern_type, _pickle, _compile)
272 
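A sketch (not part of sre.py) showing what the copy_reg registration above buys: compiled patterns can be pickled and are rebuilt via _compile(pattern, flags):

  import sre, pickle

  p = sre.compile(r"\d+", sre.I)
  q = pickle.loads(pickle.dumps(p))                  # round-trips through _pickle -> _compile
  q.pattern == p.pattern and q.flags == p.flags      # True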
273 # --------------------------------------------------------------------
274 # experimental stuff (see python-dev discussions for details)
275 
276 class Scanner:
277  def __init__(self, lexicon, flags=0):
278  from sre_constants import BRANCH, SUBPATTERN
279  self.lexicon = lexicon
280  # combine phrases into a compound pattern
281  p = []
282  s = sre_parse.Pattern()
283  s.flags = flags
284  for phrase, action in lexicon:
285  p.append(sre_parse.SubPattern(s, [
286  (SUBPATTERN, (len(p)+1, sre_parse.parse(phrase, flags))),
287  ]))
288  p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
289 s.groups = len(p)
290 self.scanner = sre_compile.compile(p)
291  def scan(self, string):
292  result = []
293  append = result.append
294  match = self.scanner.scanner(string).match
295  i = 0
296  while 1:
297  m = match()
298  if not m:
299  break
300  j = m.end()
301  if i == j:
302  break
303  action = self.lexicon[m.lastindex-1][1]
304  if callable(action):
305  self.match = m
306  action = action(self, m.group())
307  if action is not None:
308  append(action)
309  i = j
310  return result, string[i:]
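The Scanner class above is experimental; the sketch below (not part of sre.py, with an invented lexicon) shows one way to use it as a small tokenizer. Each lexicon entry pairs a phrase with an action: a callable is called with the scanner and the matched text, a None action drops the token, and unmatched trailing input is returned as the remainder.

  import sre

  scanner = sre.Scanner([
      (r"[0-9]+",   lambda s, tok: ("INT", int(tok))),
      (r"[a-z_]+",  lambda s, tok: ("NAME", tok)),
      (r"[=+\-*/]", lambda s, tok: ("OP", tok)),
      (r"\s+",      None),                 # None action: whitespace is dropped
  ])

  tokens, remainder = scanner.scan("speed = 42 + boost")
  # tokens    == [('NAME', 'speed'), ('OP', '='), ('INT', 42), ('OP', '+'), ('NAME', 'boost')]
  # remainder == ''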