# -----------------------------------------------------------------------------
# ply: lex.py
#
# Lexer generator for PLY (Python Lex-Yacc).
# -----------------------------------------------------------------------------
__version__ = "3.0"
__tabversion__ = "3.0"

import re, sys, types, copy, os

# This tuple contains known string types
try:
    # Python 2.6
    StringTypes = (types.StringType, types.UnicodeType)
except AttributeError:
    # Python 3.0
    StringTypes = (str, bytes)

# Extract the code attribute of a function.  Different implementations
# are for Python 2/3 compatibility.
if sys.version_info[0] < 3:
    def func_code(f):
        return f.func_code
else:
    def func_code(f):
        return f.__code__

# This regular expression is used to match valid token names
_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$')

# Exception thrown when invalid token encountered and no default error
# handler is defined.
class LexError(Exception):
    def __init__(self, message, s):
        self.args = (message,)
        self.text = s

# Token class.  This class is used to represent the tokens produced.
class LexToken(object):
    def __repr__(self):
        return "LexToken(%s,%r,%d,%d)" % (self.type, self.value, self.lineno, self.lexpos)
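
# Note: LexToken instances are populated dynamically by the lexer; the four
# attributes used in __repr__ above (type, value, lineno, lexpos) are the ones
# token() sets on every token it returns.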

# This object is a stand-in for a logging object created by the
# logging module.
class PlyLogger(object):
    def __init__(self, f):
        self.f = f

    def critical(self, msg, *args, **kwargs):
        self.f.write((msg % args) + "\n")

    def warning(self, msg, *args, **kwargs):
        self.f.write("WARNING: " + (msg % args) + "\n")

    def error(self, msg, *args, **kwargs):
        self.f.write("ERROR: " + (msg % args) + "\n")

    info = critical
    debug = critical
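
# Illustrative use: PlyLogger(sys.stderr) is the default logger created by
# lex() below; PlyLogger(sys.stderr).warning("bad %s", "rule") would write
# "WARNING: bad rule" to stderr.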

# -----------------------------------------------------------------------------
#                           === Lexing Engine ===
#
# The Lexer class implements the lexer runtime.  The main public methods are:
#
#    input()   -  Store a new string in the lexer
#    token()   -  Get the next token
#    clone()   -  Clone the lexer
# -----------------------------------------------------------------------------

class Lexer:
    def __init__(self):
        self.lexre = None             # Master regular expression. This is a list of
                                      # tuples (re, findex) where re is a compiled
                                      # regular expression and findex is a list
                                      # mapping regex group numbers to rules
        self.lexretext = None         # Current regular expression strings
        self.lexstatere = {}          # Dictionary mapping lexer states to master regexs
        self.lexstateretext = {}      # Dictionary mapping lexer states to regex strings
        self.lexstaterenames = {}     # Dictionary mapping lexer states to symbol names
        self.lexstate = "INITIAL"     # Current lexer state
        self.lexstatestack = []       # Stack of lexer states
        self.lexstateinfo = None      # State information
        self.lexstateignore = {}      # Dictionary of ignored characters for each state
        self.lexstateerrorf = {}      # Dictionary of error functions for each state
        self.lexreflags = 0           # Optional re compile flags
        self.lexdata = None           # Actual input data (as a string)
        self.lexpos = 0               # Current position in input text
        self.lexlen = 0               # Length of the input text
        self.lexerrorf = None         # Error rule (if any)
        self.lextokens = None         # List of valid tokens
        self.lexignore = ""           # Ignored characters
        self.lexliterals = ""         # Literal characters that can be passed through
        self.lexmodule = None         # Module in which the lexer was defined
        self.lineno = 1               # Current line number
        self.lexoptimize = 0          # Optimized mode

    def clone(self, object=None):
        c = copy.copy(self)

        # If the object parameter has been supplied, it means we are attaching the
        # lexer to a new object.  In this case, we have to rebind all methods in
        # the lexstatere and lexstateerrorf tables.

        if object:
            newtab = {}
            for key, ritem in self.lexstatere.items():
                newre = []
                for cre, findex in ritem:
                    newfindex = []
                    for f in findex:
                        if not f or not f[0]:
                            newfindex.append(f)
                            continue
                        newfindex.append((getattr(object, f[0].__name__), f[1]))
                    newre.append((cre, newfindex))
                newtab[key] = newre
            c.lexstatere = newtab
            c.lexstateerrorf = {}
            for key, ef in self.lexstateerrorf.items():
                c.lexstateerrorf[key] = getattr(object, ef.__name__)
            c.lexmodule = object
        return c
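
    # Design note: clone() makes a shallow copy, so a clone made without an
    # object argument shares its rule tables with the original; passing a new
    # object rebinds every rule function onto that object, which is what the
    # rebinding loop above does.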

    # ------------------------------------------------------------
    # writetab() - Write lexer information to a table file
    # ------------------------------------------------------------
    def writetab(self, tabfile, outputdir=""):
        if isinstance(tabfile, types.ModuleType):
            return
        basetabfilename = tabfile.split(".")[-1]
        filename = os.path.join(outputdir, basetabfilename) + ".py"
        tf = open(filename, "w")
        tf.write("# %s.py. This file automatically created by PLY (version %s). Don't edit!\n" % (tabfile, __version__))
        tf.write("_tabversion = %s\n" % repr(__version__))
        tf.write("_lextokens = %s\n" % repr(self.lextokens))
        tf.write("_lexreflags = %s\n" % repr(self.lexreflags))
        tf.write("_lexliterals = %s\n" % repr(self.lexliterals))
        tf.write("_lexstateinfo = %s\n" % repr(self.lexstateinfo))

        tabre = {}
        # Collect all functions in the initial state
        initial = self.lexstatere["INITIAL"]
        initialfuncs = []
        for part in initial:
            for f in part[1]:
                if f and f[0]:
                    initialfuncs.append(f)

        for key, lre in self.lexstatere.items():
            titem = []
            for i in range(len(lre)):
                titem.append((self.lexstateretext[key][i], _funcs_to_names(lre[i][1], self.lexstaterenames[key][i])))
            tabre[key] = titem

        tf.write("_lexstatere = %s\n" % repr(tabre))
        tf.write("_lexstateignore = %s\n" % repr(self.lexstateignore))

        taberr = {}
        for key, ef in self.lexstateerrorf.items():
            if ef:
                taberr[key] = ef.__name__
            else:
                taberr[key] = None
        tf.write("_lexstateerrorf = %s\n" % repr(taberr))
        tf.close()
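
    # For illustration: the generated table module consists of plain
    # assignments such as
    #     _tabversion = '3.0'
    #     _lextokens = {'NUMBER': 1, ...}
    # which readtab() below loads back instead of re-analyzing the rules.
    # (The names shown are exactly those written above; the values are
    # hypothetical.)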

    # ------------------------------------------------------------
    # readtab() - Read lexer information from a tab file
    # ------------------------------------------------------------
    def readtab(self, tabfile, fdict):
        if isinstance(tabfile, types.ModuleType):
            lextab = tabfile
        else:
            if sys.version_info[0] < 3:
                exec("import %s as lextab" % tabfile)
            else:
                env = {}
                exec("import %s as lextab" % tabfile, env, env)
                lextab = env['lextab']

        if getattr(lextab, "_tabversion", "0.0") != __version__:
            raise ImportError("Inconsistent PLY version")

        self.lextokens = lextab._lextokens
        self.lexreflags = lextab._lexreflags
        self.lexliterals = lextab._lexliterals
        self.lexstateinfo = lextab._lexstateinfo
        self.lexstateignore = lextab._lexstateignore
        self.lexstatere = {}
        self.lexstateretext = {}
        for key, lre in lextab._lexstatere.items():
            titem = []
            txtitem = []
            for i in range(len(lre)):
                titem.append((re.compile(lre[i][0], lextab._lexreflags), _names_to_funcs(lre[i][1], fdict)))
                txtitem.append(lre[i][0])
            self.lexstatere[key] = titem
            self.lexstateretext[key] = txtitem
        self.lexstateerrorf = {}
        for key, ef in lextab._lexstateerrorf.items():
            self.lexstateerrorf[key] = fdict[ef]
        self.begin('INITIAL')

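    # Note: readtab() is only exercised when lex() is called with optimize
    # enabled; if the import fails or _tabversion does not match __version__,
    # lex() falls back to rebuilding the tables from the rule definitions.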

    # ------------------------------------------------------------
    # input() - Push a new string into the lexer
    # ------------------------------------------------------------
    def input(self, s):
        # Pull off the first character to see if s looks like a string
        c = s[:1]
        if not isinstance(c, StringTypes):
            raise ValueError("Expected a string")
        self.lexdata = s
        self.lexpos = 0
        self.lexlen = len(s)

    # ------------------------------------------------------------
    # begin() - Changes the lexing state
    # ------------------------------------------------------------
    def begin(self, state):
        if not state in self.lexstatere:
            raise ValueError("Undefined state")
        self.lexre = self.lexstatere[state]
        self.lexretext = self.lexstateretext[state]
        self.lexignore = self.lexstateignore.get(state, "")
        self.lexerrorf = self.lexstateerrorf.get(state, None)
        self.lexstate = state
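
    # Illustrative use: a rule function can switch states with
    # t.lexer.begin('INITIAL'), or save/restore the current state with
    # push_state()/pop_state() below.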

    # ------------------------------------------------------------
    # push_state() - Changes the lexing state and saves old on stack
    # ------------------------------------------------------------
    def push_state(self, state):
        self.lexstatestack.append(self.lexstate)
        self.begin(state)

    # ------------------------------------------------------------
    # pop_state() - Restores the previous state
    # ------------------------------------------------------------
    def pop_state(self):
        self.begin(self.lexstatestack.pop())

    # ------------------------------------------------------------
    # current_state() - Returns the current lexing state
    # ------------------------------------------------------------
    def current_state(self):
        return self.lexstate

    # ------------------------------------------------------------
    # skip() - Skip ahead n characters
    # ------------------------------------------------------------
    def skip(self, n):
        self.lexpos += n

    # ------------------------------------------------------------
    # token() - Return the next token from the Lexer
    #
    # Note: This function has been carefully implemented to be as fast
    # as possible.  Don't make changes unless you really know what
    # you are doing
    # ------------------------------------------------------------
    def token(self):
        # Make local copies of frequently referenced attributes
        lexpos = self.lexpos
        lexlen = self.lexlen
        lexignore = self.lexignore
        lexdata = self.lexdata

        while lexpos < lexlen:
            # Short-circuit whitespace, tabs, and other ignored characters
            if lexdata[lexpos] in lexignore:
                lexpos += 1
                continue

            # Look for a regular expression match
            for lexre, lexindexfunc in self.lexre:
                m = lexre.match(lexdata, lexpos)
                if not m: continue

                # Create a token for return
                tok = LexToken()
                tok.value = m.group()
                tok.lineno = self.lineno
                tok.lexpos = lexpos

                i = m.lastindex
                func, tok.type = lexindexfunc[i]

                if not func:
                    # If no token type was set, it's an ignored token
                    if tok.type:
                        self.lexpos = m.end()
                        return tok
                    else:
                        lexpos = m.end()
                        break

                lexpos = m.end()

                # If the token is processed by a function, call it

                tok.lexer = self      # Set additional attributes useful in token rules
                self.lexmatch = m
                self.lexpos = lexpos

                newtok = func(tok)

                # Every function must return a token; if nothing, we just move to the next token
                if not newtok:
                    lexpos = self.lexpos          # This is here in case user has updated lexpos.
                    lexignore = self.lexignore    # This is here in case there was a state change
                    break

                # Verify type of the token.  If not in the token map, raise an error
                if not self.lexoptimize:
                    if not newtok.type in self.lextokens:
                        raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % (
                            func_code(func).co_filename, func_code(func).co_firstlineno,
                            func.__name__, newtok.type), lexdata[lexpos:])

                return newtok
            else:
                # No match, see if in literals
                if lexdata[lexpos] in self.lexliterals:
                    tok = LexToken()
                    tok.value = lexdata[lexpos]
                    tok.lineno = self.lineno
                    tok.type = tok.value
                    tok.lexpos = lexpos
                    self.lexpos = lexpos + 1
                    return tok

                # No match. Call t_error() if defined.
                if self.lexerrorf:
                    tok = LexToken()
                    tok.value = self.lexdata[lexpos:]
                    tok.lineno = self.lineno
                    tok.type = "error"
                    tok.lexer = self
                    tok.lexpos = lexpos
                    self.lexpos = lexpos
                    newtok = self.lexerrorf(tok)
                    if lexpos == self.lexpos:
                        # Error method didn't change text position at all. This is an error.
                        raise LexError("Scanning error. Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:])
                    lexpos = self.lexpos
                    if not newtok: continue
                    return newtok

                self.lexpos = lexpos
                raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos], lexpos), lexdata[lexpos:])

        self.lexpos = lexpos + 1
        if self.lexdata is None:
            raise RuntimeError("No input string given with input()")
        return None

    # Iterator interface
    def __iter__(self):
        return self

    def next(self):
        t = self.token()
        if t is None:
            raise StopIteration
        return t

    __next__ = next

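# Illustrative use of the iterator protocol defined above: after
# lexer.input(data), "for tok in lexer: print(tok)" yields each LexToken
# until token() returns None.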

# -----------------------------------------------------------------------------
#                           ==== Lex Builder ===
#
# The functions and classes below are used to collect lexing information
# and build a Lexer object from it.
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# get_caller_module_dict()
#
# This function returns a dictionary containing all of the symbols defined
# within a caller further down the call stack.  This is used to get the
# environment associated with the lex() call if none was provided.
# -----------------------------------------------------------------------------

def get_caller_module_dict(levels):
    try:
        raise RuntimeError
    except RuntimeError:
        e, b, t = sys.exc_info()
        f = t.tb_frame
        while levels > 0:
            f = f.f_back
            levels -= 1
        ldict = f.f_globals.copy()
        if f.f_globals != f.f_locals:
            ldict.update(f.f_locals)
        return ldict

# -----------------------------------------------------------------------------
# _funcs_to_names()
#
# Given a list of regular expression functions, this converts it to a list
# suitable for output to a table file
# -----------------------------------------------------------------------------

def _funcs_to_names(funclist, namelist):
    result = []
    for f, name in zip(funclist, namelist):
        if f and f[0]:
            result.append((name, f[1]))
        else:
            result.append(f)
    return result

# -----------------------------------------------------------------------------
# _names_to_funcs()
#
# Given a list of regular expression function names, this converts it back to
# functions.
# -----------------------------------------------------------------------------

def _names_to_funcs(namelist, fdict):
    result = []
    for n in namelist:
        if n and n[0]:
            result.append((fdict[n[0]], n[1]))
        else:
            result.append(n)
    return result

# -----------------------------------------------------------------------------
# _form_master_re()
#
# This function takes a list of all of the regex components and attempts to
# form the master regular expression.  Given limitations in the Python re
# module, it may be necessary to break the master regex into separate
# expressions.
# -----------------------------------------------------------------------------

def _form_master_re(relist, reflags, ldict, toknames):
    if not relist: return []
    regex = "|".join(relist)
    try:
        lexre = re.compile(regex, re.VERBOSE | reflags)

        # Build the index to function map for the matching engine
        lexindexfunc = [None] * (max(lexre.groupindex.values()) + 1)
        lexindexnames = lexindexfunc[:]

        for f, i in lexre.groupindex.items():
            handle = ldict.get(f, None)
            if type(handle) in (types.FunctionType, types.MethodType):
                lexindexfunc[i] = (handle, toknames[f])
                lexindexnames[i] = f
            elif handle is not None:
                lexindexnames[i] = f
                if f.find("ignore_") > 0:
                    lexindexfunc[i] = (None, None)
                else:
                    lexindexfunc[i] = (None, toknames[f])

        return [(lexre, lexindexfunc)], [regex], [lexindexnames]
    except Exception:
        # The pattern was too large to compile as one regex; split the rule
        # list in half and try again on each half.
        m = int(len(relist) / 2)
        if m == 0: m = 1
        llist, lre, lnames = _form_master_re(relist[:m], reflags, ldict, toknames)
        rlist, rre, rnames = _form_master_re(relist[m:], reflags, ldict, toknames)
        return (llist + rlist), (lre + rre), (lnames + rnames)

# -----------------------------------------------------------------------------
# _statetoken(s, names)
#
# Given a declaration name s of the form "t_" and a dictionary whose keys are
# state names, this function returns a tuple (states, tokenname) where states
# is a tuple of state names and tokenname is the name of the token.  For
# example, calling this with s = "t_foo_bar_SPAM" might return
# (('foo','bar'),'SPAM')
# -----------------------------------------------------------------------------

def _statetoken(s, names):
    nonstate = 1
    parts = s.split("_")
    for i in range(1, len(parts)):
        if not parts[i] in names and parts[i] != 'ANY': break
    if i > 1:
        states = tuple(parts[1:i])
    else:
        states = ('INITIAL',)

    if 'ANY' in states:
        states = tuple(names)

    tokenname = "_".join(parts[i:])
    return (states, tokenname)

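# Worked example (hypothetical state names): with names containing 'foo' and
# 'bar', _statetoken("t_foo_bar_NUMBER", names) returns (('foo','bar'),
# 'NUMBER'); "t_NUMBER" maps to (('INITIAL',), 'NUMBER'); and "t_ANY_NUMBER"
# maps to a tuple of all declared states.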

# -----------------------------------------------------------------------------
# LexerReflect()
#
# This class represents information needed to build a lexer as extracted from
# a user's input file.
# -----------------------------------------------------------------------------
class LexerReflect(object):
    def __init__(self, ldict, log=None, reflags=0):
        self.ldict = ldict
        self.error_func = None
        self.tokens = []
        self.reflags = reflags
        self.stateinfo = {'INITIAL': 'inclusive'}
        self.files = {}
        self.error = 0

        if log is None:
            self.log = PlyLogger(sys.stderr)
        else:
            self.log = log

    # Get all of the basic information
    def get_all(self):
        self.get_tokens()
        self.get_literals()
        self.get_states()
        self.get_rules()

    # Validate all of the information
    def validate_all(self):
        self.validate_tokens()
        self.validate_literals()
        self.validate_rules()
        return self.error

    # Get the tokens map
    def get_tokens(self):
        tokens = self.ldict.get("tokens", None)
        if not tokens:
            self.log.error("No token list is defined")
            self.error = 1
            return

        if not isinstance(tokens, (list, tuple)):
            self.log.error("tokens must be a list or tuple")
            self.error = 1
            return

        if not tokens:
            self.log.error("tokens is empty")
            self.error = 1
            return

        self.tokens = tokens

    # Validate the tokens
    def validate_tokens(self):
        terminals = {}
        for n in self.tokens:
            if not _is_identifier.match(n):
                self.log.error("Bad token name '%s'", n)
                self.error = 1
            if n in terminals:
                self.log.warning("Token '%s' multiply defined", n)
            terminals[n] = 1

    # Get the literals specifier
    def get_literals(self):
        self.literals = self.ldict.get("literals", "")

    # Validate literals
    def validate_literals(self):
        try:
            for c in self.literals:
                if not isinstance(c, StringTypes) or len(c) > 1:
                    self.log.error("Invalid literal %s. Must be a single character", repr(c))
                    self.error = 1
                    continue
        except TypeError:
            self.log.error("Invalid literals specification. literals must be a sequence of characters")
            self.error = 1

    def get_states(self):
        self.states = self.ldict.get("states", None)
        # Build the state map
        if self.states:
            if not isinstance(self.states, (tuple, list)):
                self.log.error("states must be defined as a tuple or list")
                self.error = 1
            else:
                for s in self.states:
                    if not isinstance(s, tuple) or len(s) != 2:
                        self.log.error("Invalid state specifier %s. Must be a tuple (statename,'exclusive|inclusive')", repr(s))
                        self.error = 1
                        continue
                    name, statetype = s
                    if not isinstance(name, StringTypes):
                        self.log.error("State name %s must be a string", repr(name))
                        self.error = 1
                        continue
                    if not (statetype == 'inclusive' or statetype == 'exclusive'):
                        self.log.error("State type for state %s must be 'inclusive' or 'exclusive'", name)
                        self.error = 1
                        continue
                    if name in self.stateinfo:
                        self.log.error("State '%s' already defined", name)
                        self.error = 1
                        continue
                    self.stateinfo[name] = statetype

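    # Illustrative (hypothetical) states declaration in a user module:
    #     states = (('ccomment', 'exclusive'),
    #               ('python',   'inclusive'))
    # Each entry must be a (statename, 'inclusive'|'exclusive') tuple, as
    # validated above.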

    # Get all of the symbols with a t_ prefix and sort them into various
    # categories (functions, strings, error functions, and ignore characters)
    def get_rules(self):
        tsymbols = [f for f in self.ldict if f[:2] == 't_']

        # Now build up a list of functions and a list of strings
        self.toknames = {}        # Mapping of symbols to token names
        self.funcsym = {}         # Symbols defined as functions
        self.strsym = {}          # Symbols defined as strings
        self.ignore = {}          # Ignore strings by state
        self.errorf = {}          # Error functions by state

        for s in self.stateinfo:
            self.funcsym[s] = []
            self.strsym[s] = []

        if len(tsymbols) == 0:
            self.log.error("No rules of the form t_rulename are defined")
            self.error = 1
            return

        for f in tsymbols:
            t = self.ldict[f]
            states, tokname = _statetoken(f, self.stateinfo)
            self.toknames[f] = tokname

            if hasattr(t, "__call__"):
                if tokname == 'error':
                    for s in states:
                        self.errorf[s] = t
                elif tokname == 'ignore':
                    line = func_code(t).co_firstlineno
                    file = func_code(t).co_filename
                    self.log.error("%s:%d: Rule '%s' must be defined as a string", file, line, t.__name__)
                    self.error = 1
                else:
                    for s in states:
                        self.funcsym[s].append((f, t))
            elif isinstance(t, StringTypes):
                if tokname == 'ignore':
                    for s in states:
                        self.ignore[s] = t
                    if "\\" in t:
                        self.log.warning("%s contains a literal backslash '\\'", f)

                elif tokname == 'error':
                    self.log.error("Rule '%s' must be defined as a function", f)
                    self.error = 1
                else:
                    for s in states:
                        self.strsym[s].append((f, t))
            else:
                self.log.error("%s not defined as a function or string", f)
                self.error = 1

        # Sort the functions by line number
        for f in self.funcsym.values():
            if sys.version_info[0] < 3:
                f.sort(lambda x, y: cmp(func_code(x[1]).co_firstlineno, func_code(y[1]).co_firstlineno))
            else:
                # Python 3.0
                f.sort(key=lambda x: func_code(x[1]).co_firstlineno)

        # Sort the strings by regular expression length
        for s in self.strsym.values():
            if sys.version_info[0] < 3:
                s.sort(lambda x, y: (len(x[1]) < len(y[1])) - (len(x[1]) > len(y[1])))
            else:
                # Python 3.0
                s.sort(key=lambda x: len(x[1]), reverse=True)

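    # Design note: string rules are sorted from longest to shortest regular
    # expression before being joined into the master pattern, so that longer
    # literal patterns (e.g. '==') are tried before their prefixes (e.g. '=').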

    # Validate all of the t_rules collected
    def validate_rules(self):
        for state in self.stateinfo:
            # Validate all rules defined by functions
            for fname, f in self.funcsym[state]:
                line = func_code(f).co_firstlineno
                file = func_code(f).co_filename
                self.files[file] = 1

                tokname = self.toknames[fname]
                if isinstance(f, types.MethodType):
                    reqargs = 2
                else:
                    reqargs = 1
                nargs = func_code(f).co_argcount
                if nargs > reqargs:
                    self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__)
                    self.error = 1
                    continue

                if nargs < reqargs:
                    self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__)
                    self.error = 1
                    continue

                if not f.__doc__:
                    self.log.error("%s:%d: No regular expression defined for rule '%s'", file, line, f.__name__)
                    self.error = 1
                    continue

                try:
                    c = re.compile("(?P<%s>%s)" % (fname, f.__doc__), re.VERBOSE | self.reflags)
                    if c.match(""):
                        self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file, line, f.__name__)
                        self.error = 1
                except re.error:
                    _etype, e, _etrace = sys.exc_info()
                    self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file, line, f.__name__, e)
                    if '#' in f.__doc__:
                        self.log.error("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'", file, line, f.__name__)
                    self.error = 1

            # Validate all rules defined by strings
            for name, r in self.strsym[state]:
                tokname = self.toknames[name]
                if tokname == 'error':
                    self.log.error("Rule '%s' must be defined as a function", name)
                    self.error = 1
                    continue

                if not tokname in self.tokens and tokname.find("ignore_") < 0:
                    self.log.error("Rule '%s' defined for an unspecified token %s", name, tokname)
                    self.error = 1
                    continue

                try:
                    c = re.compile("(?P<%s>%s)" % (name, r), re.VERBOSE | self.reflags)
                    if c.match(""):
                        self.log.error("Regular expression for rule '%s' matches empty string", name)
                        self.error = 1
                except re.error:
                    _etype, e, _etrace = sys.exc_info()
                    self.log.error("Invalid regular expression for rule '%s'. %s", name, e)
                    if '#' in r:
                        self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'", name)
                    self.error = 1

            if not self.funcsym[state] and not self.strsym[state]:
                self.log.error("No rules defined for state '%s'", state)
                self.error = 1

            # Validate the error function
            efunc = self.errorf.get(state, None)
            if efunc:
                f = efunc
                line = func_code(f).co_firstlineno
                file = func_code(f).co_filename
                self.files[file] = 1

                if isinstance(f, types.MethodType):
                    reqargs = 2
                else:
                    reqargs = 1
                nargs = func_code(f).co_argcount
                if nargs > reqargs:
                    self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__)
                    self.error = 1

                if nargs < reqargs:
                    self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__)
                    self.error = 1

        for f in self.files:
            self.validate_file(f)

    # -----------------------------------------------------------------------------
    # validate_file()
    #
    # This checks to see if there are duplicated t_rulename() functions or
    # strings in the parser input file.  This is done using a simple regular
    # expression match on each line in the given file.
    # -----------------------------------------------------------------------------

    def validate_file(self, filename):
        import os.path
        base, ext = os.path.splitext(filename)
        if ext != '.py': return          # No idea what the file is. Return OK

        try:
            f = open(filename)
            lines = f.readlines()
            f.close()
        except IOError:
            return                       # Couldn't find the file.  Don't worry about it

        fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(')
        sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=')

        counthash = {}
        linen = 1
        for l in lines:
            m = fre.match(l)
            if not m:
                m = sre.match(l)
            if m:
                name = m.group(1)
                prev = counthash.get(name)
                if not prev:
                    counthash[name] = linen
                else:
                    self.log.error("%s:%d: Rule %s redefined. Previously defined on line %d", filename, linen, name, prev)
                    self.error = 1
            linen += 1

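    # For illustration: defining t_NUMBER twice in the same module would be
    # reported here as "Rule t_NUMBER redefined", since both definitions match
    # the fre/sre patterns above and collide in counthash.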

# -----------------------------------------------------------------------------
# lex(module)
#
# Build all of the regular expression rules from definitions in the supplied
# module
# -----------------------------------------------------------------------------
def lex(module=None, object=None, debug=0, optimize=0, lextab="lextab", reflags=0, nowarn=0, outputdir="", debuglog=None, errorlog=None):
    global lexer
    ldict = None
    stateinfo = {'INITIAL': 'inclusive'}
    lexobj = Lexer()
    lexobj.lexoptimize = optimize
    global token, input

    if errorlog is None:
        errorlog = PlyLogger(sys.stderr)

    if debug:
        if debuglog is None:
            debuglog = PlyLogger(sys.stderr)

    # Get the module dictionary used for the lexer
    if object: module = object

    if module:
        _items = [(k, getattr(module, k)) for k in dir(module)]
        ldict = dict(_items)
    else:
        ldict = get_caller_module_dict(2)

    # Collect parser information from the dictionary
    linfo = LexerReflect(ldict, log=errorlog, reflags=reflags)
    linfo.get_all()
    if not optimize:
        if linfo.validate_all():
            raise SyntaxError("Can't build lexer")

    if optimize and lextab:
        try:
            lexobj.readtab(lextab, ldict)
            token = lexobj.token
            input = lexobj.input
            lexer = lexobj
            return lexobj

        except ImportError:
            pass

    # Dump some basic debugging information
    if debug:
        debuglog.info("lex: tokens = %r", linfo.tokens)
        debuglog.info("lex: literals = %r", linfo.literals)
        debuglog.info("lex: states = %r", linfo.stateinfo)

    # Build a dictionary of valid token names
    lexobj.lextokens = {}
    for n in linfo.tokens:
        lexobj.lextokens[n] = 1

    # Get the literals specifier
    if isinstance(linfo.literals, (list, tuple)):
        lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals)
    else:
        lexobj.lexliterals = linfo.literals

    # Get the stateinfo dictionary
    stateinfo = linfo.stateinfo

    regexs = {}
    # Build the master regular expressions
    for state in stateinfo:
        regex_list = []

        # Add rules defined by functions first
        for fname, f in linfo.funcsym[state]:
            line = func_code(f).co_firstlineno
            file = func_code(f).co_filename
            regex_list.append("(?P<%s>%s)" % (fname, f.__doc__))
            if debug:
                debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", fname, f.__doc__, state)

        # Now add all of the simple rules
        for name, r in linfo.strsym[state]:
            regex_list.append("(?P<%s>%s)" % (name, r))
            if debug:
                debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", name, r, state)

        regexs[state] = regex_list

    # Build the master regular expressions

    if debug:
        debuglog.info("lex: ==== MASTER REGEXS FOLLOW ====")

    for state in regexs:
        lexre, re_text, re_names = _form_master_re(regexs[state], reflags, ldict, linfo.toknames)
        lexobj.lexstatere[state] = lexre
        lexobj.lexstateretext[state] = re_text
        lexobj.lexstaterenames[state] = re_names
        if debug:
            for i in range(len(re_text)):
                debuglog.info("lex: state '%s' : regex[%d] = '%s'", state, i, re_text[i])

    # For inclusive states, we need to add the regular expressions from the INITIAL state
    for state, stype in stateinfo.items():
        if state != "INITIAL" and stype == 'inclusive':
            lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL'])
            lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL'])
            lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL'])

    lexobj.lexstateinfo = stateinfo
    lexobj.lexre = lexobj.lexstatere["INITIAL"]
    lexobj.lexretext = lexobj.lexstateretext["INITIAL"]

    # Set up ignore variables
    lexobj.lexstateignore = linfo.ignore
    lexobj.lexignore = lexobj.lexstateignore.get("INITIAL", "")

    # Set up error functions
    lexobj.lexstateerrorf = linfo.errorf
    lexobj.lexerrorf = linfo.errorf.get("INITIAL", None)
    if not lexobj.lexerrorf:
        errorlog.warning("No t_error rule is defined")

    # Check state information for ignore and error rules
    for s, stype in stateinfo.items():
        if stype == 'exclusive':
            if not s in linfo.errorf:
                errorlog.warning("No error rule is defined for exclusive state '%s'", s)
            if not s in linfo.ignore and lexobj.lexignore:
                errorlog.warning("No ignore rule is defined for exclusive state '%s'", s)
        elif stype == 'inclusive':
            if not s in linfo.errorf:
                linfo.errorf[s] = linfo.errorf.get("INITIAL", None)
            if not s in linfo.ignore:
                linfo.ignore[s] = linfo.ignore.get("INITIAL", "")

    # Create global versions of the token() and input() functions
    token = lexobj.token
    input = lexobj.input
    lexer = lexobj

    # If in optimize mode, we write the lextab
    if lextab and optimize:
        lexobj.writetab(lextab, outputdir)

    return lexobj
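
# Illustrative (hypothetical) minimal use of lex(). The rule names follow the
# t_TOKENNAME convention that get_rules() above collects:
#
#     tokens = ('NUMBER', 'PLUS')
#     t_PLUS = r'\+'
#     t_ignore = ' \t'
#
#     def t_NUMBER(t):
#         r'\d+'
#         t.value = int(t.value)
#         return t
#
#     def t_error(t):
#         sys.stderr.write("Illegal character %r\n" % t.value[0])
#         t.lexer.skip(1)
#
#     mylexer = lex()        # Builds the lexer from this module's definitions
#     mylexer.input("3 + 4")
#     for tok in mylexer:
#         print(tok)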

# -----------------------------------------------------------------------------
# runmain()
#
# This runs the lexer as a main program
# -----------------------------------------------------------------------------

def runmain(lexer=None, data=None):
    if not data:
        try:
            filename = sys.argv[1]
            f = open(filename)
            data = f.read()
            f.close()
        except IndexError:
            sys.stdout.write("Reading from standard input (type EOF to end):\n")
            data = sys.stdin.read()

    if lexer:
        _input = lexer.input
    else:
        _input = input
    _input(data)
    if lexer:
        _token = lexer.token
    else:
        _token = token

    while 1:
        tok = _token()
        if not tok: break
        sys.stdout.write("(%s,%r,%d,%d)\n" % (tok.type, tok.value, tok.lineno, tok.lexpos))

# -----------------------------------------------------------------------------
# @TOKEN(regex)
#
# This decorator function can be used to set the docstring of a function to a
# regular expression.
# -----------------------------------------------------------------------------

def TOKEN(r):
    def set_doc(f):
        if hasattr(r, "__call__"):
            f.__doc__ = r.__doc__
        else:
            f.__doc__ = r
        return f
    return set_doc
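
# Illustrative use of the decorator, with a hypothetical pattern:
#
#     identifier = r'[a-zA-Z_][a-zA-Z0-9_]*'
#
#     @TOKEN(identifier)
#     def t_ID(t):
#         return t
#
# This sets t_ID.__doc__ to the pattern, which is where the rule machinery
# above looks for a rule's regular expression.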

# Alternative spelling of the TOKEN decorator
Token = TOKEN