Changes to /src/lepl/matchers/combine.py: cdbfc4c97d39 vs. c0c284c09268

The only difference between the two revisions is in the imports: revision
c0c284c09268 drops tagged (from lepl.core.parser), _GeneratorManager (from
lepl.core.manager) and OperatorMatcher (no longer imported from
lepl.matchers.support). The listing below shows the file at revision
c0c284c09268.
# The contents of this file are subject to the Mozilla Public License
# (MPL) Version 1.1 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License
# at http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and
# limitations under the License.
#
# The Original Code is LEPL (http://www.acooke.org/lepl)
# The Initial Developer of the Original Code is Andrew Cooke.
# Portions created by the Initial Developer are Copyright (C) 2009-2010
# Andrew Cooke (andrew@acooke.org). All Rights Reserved.
#
# Alternatively, the contents of this file may be used under the terms
# of the LGPL license (the GNU Lesser General Public License,
# http://www.gnu.org/licenses/lgpl.html), in which case the provisions
# of the LGPL License are applicable instead of those above.
#
# If you wish to allow use of your version of this file only under the
# terms of the LGPL License and not to allow others to use your version
# of this file under the MPL, indicate your decision by deleting the
# provisions above and replace them with the notice and other provisions
# required by the LGPL License. If you do not delete the provisions
# above, a recipient may use your version of this file under either the
# MPL or the LGPL License.

'''
Matchers that combine sub-matchers (And, Or etc).
'''

# pylint: disable-msg=C0103,W0212
# (consistent interfaces)
# pylint: disable-msg=E1101
# (_args create attributes)
# pylint: disable-msg=R0901, R0904, W0142
# lepl conventions

from abc import ABCMeta
from collections import deque
from operator import __add__

from lepl.matchers.core import Literal
from lepl.matchers.matcher import add_children
from lepl.matchers.support import coerce_, sequence_matcher_factory, \
    trampoline_matcher_factory, to
from lepl.matchers.transform import Transformable
from lepl.support.lib import lmap, fmt, document


# pylint: disable-msg=C0103, W0105
# Python 2.6
#class BaseSearch(metaclass=ABCMeta):
_BaseSearch = ABCMeta('_BaseSearch', (object, ), {})
'''
ABC used to identify matchers.

Note that graph traversal assumes subclasses are hashable and iterable.
'''

class BaseSearch(_BaseSearch):
    pass


def search_factory(factory):
    '''
    Add the arg processing common to all searching.
    '''
    def new_factory(first, start, stop, rest=None, reduce=None,
                    generator_manager_queue_len=None):
        rest = first if rest is None else rest
        reduce = reduce if reduce else ([], __add__)
        return factory(first, start, stop, rest, reduce,
                       generator_manager_queue_len)
    return document(new_factory, factory)

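# A note on `reduce`: the default pair is `([], __add__)`, so repetition
# accumulates matched values by list concatenation. A minimal sketch of that
# fold in plain Python:
#
#     zero, join = [], __add__
#     acc = zero
#     for value in (['a'], ['b'], ['c']):
#         acc = join(acc, value)
#     assert acc == ['a', 'b', 'c']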

@trampoline_matcher_factory(first=to(Literal), rest=to(Literal))
@search_factory
def DepthFirst(first, start, stop, rest, reduce, generator_manager_queue_len):
    '''
    (Post order) Depth first repetition (typically used via `Repeat`).
    '''
    (zero, join) = reduce
    def match(support, stream):
        stack = deque()
        try:
            stack.append((0, zero, stream, first._match(stream)))
            stream = None
            while stack:
                (count1, acc1, stream1, generator) = stack[-1]
                extended = False
                if stop is None or count1 < stop:
                    count2 = count1 + 1
                    try:
                        (value, stream2) = yield generator
                        acc2 = join(acc1, value)
                        stack.append((count2, acc2, stream2, rest._match(stream2)))
                        extended = True
                    except StopIteration:
                        pass
                if not extended:
                    if count1 >= start and (stop is None or count1 <= stop):
                        yield (acc1, stream1)
                    stack.pop()
                while support.generator_manager_queue_len \
                        and len(stack) > support.generator_manager_queue_len:
                    stack.popleft()[3].generator.close()
        finally:
            while stack:
                stack.popleft()[3].generator.close()

    return match

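# A minimal usage sketch for DepthFirst (greedy repetition); strings are
# coerced to Literal, and the `parse_all` method is assumed from LEPL's
# generic matcher support:
#
#     matcher = DepthFirst('a', 1, 3)    # 'a' repeated one to three times
#     for result in matcher.parse_all('aaa'):
#         print(result)
#     # depth first generates the longest repetition first:
#     # ['a', 'a', 'a'], then ['a', 'a'], then ['a']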

@trampoline_matcher_factory(first=to(Literal), rest=to(Literal))
@search_factory
def BreadthFirst(first, start, stop, rest, reduce, generator_manager_queue_len):
    '''
    (Level order) Breadth first repetition (typically used via `Repeat`).
    '''
    (zero, join) = reduce
    def match(support, stream):
        queue = deque()
        try:
            queue.append((0, zero, stream, first._match(stream)))
            stream = None
            while queue:
                (count1, acc1, stream1, generator) = queue.popleft()
                if count1 >= start and (stop is None or count1 <= stop):
                    yield (acc1, stream1)
                count2 = count1 + 1
                try:
                    while True:
                        (value, stream2) = yield generator
                        acc2 = join(acc1, value)
                        if stop is None or count2 <= stop:
                            queue.append((count2, acc2, stream2, rest._match(stream2)))
                except StopIteration:
                    pass
                while support.generator_manager_queue_len \
                        and len(queue) > support.generator_manager_queue_len:
                    queue.popleft()[3].generator.close()
        finally:
            while queue:
                queue.popleft()[3].generator.close()

    return match

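# BreadthFirst is the non-greedy counterpart: the same repetition explored
# level by level, so shorter matches are generated first. A sketch, again
# assuming `parse_all` and a configuration that does not insist on the first
# match consuming the whole input:
#
#     matcher = BreadthFirst('a', 1, 3)
#     for result in matcher.parse_all('aaa'):
#         print(result)
#     # ['a'], then ['a', 'a'], then ['a', 'a', 'a']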

@trampoline_matcher_factory(matcher=to(Literal))
def OrderByResultCount(matcher, ascending=True):
    '''
    Modify a matcher to return results in length order.
    '''

    def match(support, stream):
        '''
        Attempt to match the stream.
        '''
        generator = matcher._match(stream)
        results = []
        try:
            while True:
                # syntax error if this on one line?!
                result = yield generator
                results.append(result)
        except StopIteration:
            pass
        for result in sorted(results,
                             key=lambda x: len(x[0]), reverse=ascending):
            yield result

    return match


@sequence_matcher_factory(first=to(Literal), rest=to(Literal))
@search_factory
def DepthNoTrampoline(first, start, stop, rest, reduce, generator_manager_queue_len):
    '''
    A more efficient search when all matchers are functions (so no need to
    trampoline). Depth first (greedy).
    '''
    (zero, join) = reduce
    def matcher(support, stream):
        stack = deque()
        try:
            stack.append((0, zero, stream, first._untagged_match(stream)))
            stream = None
            while stack:
                (count1, acc1, stream1, generator) = stack[-1]
                extended = False
                if stop is None or count1 < stop:
                    count2 = count1 + 1
                    try:
                        (value, stream2) = next(generator)
                        acc2 = join(acc1, value)
                        stack.append((count2, acc2, stream2,
                                      rest._untagged_match(stream2)))
                        extended = True
                    except StopIteration:
                        pass
                if not extended:
                    if count1 >= start and (stop is None or count1 <= stop):
                        yield (acc1, stream1)
                    stack.pop()
                while support.generator_manager_queue_len \
                        and len(stack) > support.generator_manager_queue_len:
                    stack.popleft()[3].close()
        finally:
            while stack:
                stack.popleft()[3].close()

    return matcher


@sequence_matcher_factory(first=to(Literal), rest=to(Literal))
@search_factory
def BreadthNoTrampoline(first, start, stop, rest, reduce, generator_manager_queue_len):
    '''
    A more efficient search when all matchers are functions (so no need to
    trampoline). Breadth first (non-greedy).
    '''
    (zero, join) = reduce
    def matcher(support, stream):
        queue = deque()
        try:
            queue.append((0, zero, stream, first._untagged_match(stream)))
            stream = None
            while queue:
                (count1, acc1, stream1, generator) = queue.popleft()
                if count1 >= start and (stop is None or count1 <= stop):
                    yield (acc1, stream1)
                count2 = count1 + 1
                for (value, stream2) in generator:
                    acc2 = join(acc1, value)
                    if stop is None or count2 <= stop:
                        queue.append((count2, acc2, stream2,
                                      rest._untagged_match(stream2)))
                while support.generator_manager_queue_len \
                        and len(queue) > support.generator_manager_queue_len:
                    queue.popleft()[3].close()
        finally:
            while queue:
                queue.popleft()[3].close()

    return matcher


add_children(BaseSearch, DepthFirst, BreadthFirst, \
             DepthNoTrampoline, BreadthNoTrampoline)


class _BaseCombiner(Transformable):
    '''
    Support for `And` and `Or`.
    '''

    def __init__(self, *matchers):
        super(_BaseCombiner, self).__init__()
        self._args(matchers=lmap(coerce_, matchers))

    def compose(self, wrapper):
        '''
        Generate a new instance with the composed function from the Transform.
        '''
        copy = type(self)(*self.matchers)
        copy.wrapper = self.wrapper.compose(wrapper)
        return copy


@trampoline_matcher_factory(args_=to(Literal))
def And(*matchers):
    '''
    Match one or more matchers in sequence (**&**).
    It can be used indirectly by placing ``&`` between matchers.
    '''
    # matchers = lmap(coerce_, matchers)

    def match(support, stream_in):
        if matchers:
            stack = deque([([],
                            matchers[0]._match(stream_in),
                            matchers[1:])])
            append = stack.append
            pop = stack.pop
            stream_in = None
            try:
                while stack:
                    (result, generator, queued) = pop()
                    try:
                        (value, stream_out) = yield generator
                        append((result, generator, queued))
                        if queued:
                            append((result+value,
                                    queued[0]._match(stream_out),
                                    queued[1:]))
                        else:
                            yield (result+value, stream_out)
                    except StopIteration:
                        pass
            finally:
                for (result, generator, queued) in stack:
                    generator.generator.close()

    return match

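# A minimal usage sketch for And; placing ``&`` between matchers builds the
# same thing, and strings are coerced to Literal. The `parse` method is
# assumed from LEPL's generic matcher support:
#
#     ab = And('a', 'b')        # or Literal('a') & Literal('b')
#     ab.parse('ab')            # ['a', 'b']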

@sequence_matcher_factory(args_=to(Literal))
def AndNoTrampoline(*matchers):
    '''
    Used as an optimisation when sub-matchers do not require the trampoline.
    '''
    def matcher(support, stream_in):
        if matchers:
            stack = deque([([], matchers[0]._untagged_match(stream_in), matchers[1:])])
            append = stack.append
            pop = stack.pop
            try:
                while stack:
                    (result, generator, queued) = pop()
                    try:
                        (value, stream_out) = next(generator)
                        append((result, generator, queued))
                        if queued:
                            append((result+value,
                                    queued[0]._untagged_match(stream_out),
                                    queued[1:]))
                        else:
                            yield (result+value, stream_out)
                    except StopIteration:
                        pass
            finally:
                for (result, generator, queued) in stack:
                    generator.close()

    return matcher


@trampoline_matcher_factory(args_=to(Literal))
def Or(*matchers):
    '''
    Match one of the given matchers (**|**).
    It can be used indirectly by placing ``|`` between matchers.

    Matchers are tried from left to right until one succeeds; backtracking
    will try more from the same matcher and, once that is exhausted,
    continue to the right. String arguments will be coerced to
    literal matches.
    '''

    def match(support, stream_in):
        '''
        Do the matching (return a generator that provides successive
        (result, stream) tuples). The result will correspond to one of the
        sub-matchers (starting from the left).
        '''
        for matcher in matchers:
            generator = matcher._match(stream_in)
            try:
                while True:
                    yield (yield generator)
            except StopIteration:
                pass

    return match

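# A minimal usage sketch for Or; placing ``|`` between matchers builds the
# same thing. Alternatives are tried left to right, backtracking within one
# alternative before moving on (`parse_all` assumed as above):
#
#     either = Or('ab', 'a')    # strings are coerced to Literal
#     for result in either.parse_all('ab'):
#         print(result)
#     # ['ab'], then ['a']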

@sequence_matcher_factory(args_=to(Literal))
def OrNoTrampoline(*matchers):
    '''
    Used as an optimisation when sub-matchers do not require the trampoline.
    '''
    def match(support, stream_in):
        '''
        Do the matching (return a generator that provides successive
        (result, stream) tuples). The result will correspond to one of the
        sub-matchers (starting from the left).
        '''
        for matcher in matchers:
            for result in matcher._untagged_match(stream_in):
                yield result
    return match


@trampoline_matcher_factory()
def First(*matchers):
    '''
    Match the first successful matcher only (**%**).
    It can be used indirectly by placing ``%`` between matchers.
    Note that backtracking for the first-selected matcher will still occur.

    Matchers are tried from left to right until one succeeds; backtracking
    will try more from the same matcher (only). String arguments will be
    coerced to literal matches.
    '''
    def match(support, stream):
        matched = False
        for matcher in support.matchers:
            generator = matcher._match(stream)
            try:
                while True:
                    yield (yield generator)
                    matched = True
            except StopIteration:
                pass
            if matched:
                break

    return match

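# A minimal usage sketch for First; placing ``%`` between matchers builds the
# same thing. Unlike Or, only the first alternative that succeeds contributes
# results, although it may still backtrack (`parse_all` assumed as above):
#
#     first = First(Literal('ab'), Literal('a'))
#     for result in first.parse_all('ab'):
#         print(result)
#     # ['ab'] only - the second alternative is never tried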

@trampoline_matcher_factory(args_=to(Literal))
def Difference(matcher, exclude, count=-1):
    '''
    Match with `matcher`, but exclude any matches that would be made by
    `exclude`. This is implemented by comparing the remaining stream after
    matching, so will not be affected by any transform associated with
    `exclude`. The `count` parameter gives the number of different matches
    from `exclude`. By default (-1) all matches are used. A positive
    value restricts that to the number given.
    '''
    def match(support, stream, count=count):

        # by default use a set; fall back to a list for unhashable streams
        bad = [None]
        grow_bad = None
        def append_bad(value, bad=bad):
            bad[0].append(value)
        def add_bad(value, bad=bad):
            if bad[0] is None:
                bad[0] = set()
            try:
                bad[0].add(value)
            except TypeError:
                assert not bad[0]
                bad[0] = []
                grow_bad = append_bad
                grow_bad(value)
        grow_bad = add_bad

        generator = matcher._match(stream)
        while True:
            (value, stream1) = yield generator

            if bad[0] is None:  # build bad on demand, once
                bad_generator = exclude._match(stream)
                try:
                    while count:
                        (excluded, stream2) = yield bad_generator
                        support._debug(fmt('Exclude: {0!r}', excluded))
                        grow_bad(stream2)
                        # limit number of matchers, if requested
                        count -= 1
                except StopIteration:
                    pass  # all matches for exclude

            if stream1 not in bad[0]:
                yield (value, stream1)
            else:
                support._debug(fmt('Excluding: {0!r}', value))

    return match

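# A minimal usage sketch for Difference, which drops matches whose remaining
# stream coincides with one left by `exclude` (`parse` assumed as above):
#
#     diff = Difference(Or('a', 'ab'), Literal('a'))
#     diff.parse('ab')          # ['ab'] - the bare 'a' match is excluded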

@trampoline_matcher_factory(args_=to(Literal))
def Limit(match, count=1):
    '''
    Limit the backtracking for a given matcher. A negative `count` means no
    limit.
    '''
    def matcher(support, stream, count=count):
        generator = match._match(stream)
        try:
            while count:
                yield (yield generator)
                count -= 1
        except StopIteration:
            pass
    return matcher

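# A minimal usage sketch for Limit, which caps backtracking; wrapping the
# greedy repetition from DepthFirst above (`parse_all` assumed as before):
#
#     limited = Limit(DepthFirst('a', 1, 3), count=1)
#     for result in limited.parse_all('aaa'):
#         print(result)
#     # ['a', 'a', 'a'] only - shorter repetitions are never offered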