# home *** CD-ROM | disk | FTP | other *** search   (navigation residue from the hosting page)
# Source Generated with Decompyle++
# File: in.pyc (Python 1.5)

- '''Tokenization help for Python programs.
-
- This module exports a function called \'tokenize()\' that breaks a stream of
- text into Python tokens. It accepts a readline-like method which is called
- repeatedly to get the next line of input (or "" for EOF) and a "token-eater"
- function which is called once for each token found. The latter function is
- passed the token type, a string containing the token, the starting and
- ending (row, column) coordinates of the token, and the original line. It is
- designed to match the working of the Python tokenizer exactly, except that
- it produces COMMENT tokens for comments and gives type OP for all operators.'''
- __version__ = 'Ka-Ping Yee, 26 October 1997; patched, GvR 3/30/98'
- import string
- import re
# NOTE(review): the capture emitted only a bare '*' placeholder above these
# statements -- the token-type constants (COMMENT, NL), the tok_name table,
# and the regex source strings (Token, PseudoToken, Single3, Double3) from
# the original module are missing from this view.  The statements below
# reference those missing names and will fail unless the elided definitions
# are restored (compare against the upstream tokenize.py).

# Register human-readable names for the two token types this module adds
# beyond the stock tokenizer.
tok_name[COMMENT] = 'COMMENT'
tok_name[NL] = 'NL'

# Pre-compile the tokenizer regexes once at import time so the scanning
# loop does not pay recompilation cost per line.
(tokenprog, pseudoprog, single3prog, double3prog) = map(
    re.compile, (Token, PseudoToken, Single3, Double3))
# Script entry point: tokenize the file named as the first command-line
# argument, or standard input when no argument is given.
if __name__ == '__main__':
    # 'import sys' is not visible in this capture (presumably it sits in the
    # elided portion of the module); import it here so the driver stands on
    # its own.  Re-importing an already-imported module is a no-op.
    import sys
    if len(sys.argv) > 1:
        tokenize(open(sys.argv[1]).readline)
    else:
        tokenize(sys.stdin.readline)