Spencer Rathbun
Here is Spencer's way to read large files with Python. Amazingly, I have adopted nearly the same strategy for one of my scripts at work — thanks to Dave Beazley for his fantastic work on generators in Python.
#!/usr/bin/env python
import argparse
import bz2
import fnmatch
import gzip
import os
def gen_find(filepat, top):
    """Yield full paths of files under *top* whose names match *filepat*.

    filepat is an fnmatch-style glob pattern (e.g. ``"*.log"``); top is
    the root directory to walk recursively.
    """
    # os.walk visits every directory below top; fnmatch.filter selects
    # the matching file names within each directory.
    for path, dirlist, filelist in os.walk(top):
        for name in fnmatch.filter(filelist, filepat):
            yield os.path.join(path, name)
def gen_open(filenames):
    """Yield open text-mode file handles for an iterable of file names.

    Names ending in ``.gz`` or ``.bz2`` are opened through the matching
    decompressor; anything else is opened as a plain text file.
    """
    for name in filenames:
        # Open compressed files in text mode ('rt') so every yielded
        # handle produces str lines, consistent with plain open();
        # gzip.open/bz2.open default to binary mode in Python 3.
        if name.endswith(".gz"):
            yield gzip.open(name, "rt")
        elif name.endswith(".bz2"):
            yield bz2.open(name, "rt")
        else:
            yield open(name)
def gen_cat(sources):
    """Concatenate an iterable of iterables into one flat stream of items.

    Used here to turn a sequence of open file handles into a single
    stream of lines, lazily.
    """
    for source in sources:
        # 'yield from' delegates to each sub-iterable in turn (PEP 380).
        yield from source
def main(regex, searchDir):
    """Find files matching *regex* under *searchDir* and print every line.

    The pipeline is fully lazy: file names, file handles and lines are
    all produced by generators, so arbitrarily large inputs are processed
    in constant memory.
    """
    fileNames = gen_find(regex, searchDir)
    fileHandles = gen_open(fileNames)
    fileLines = gen_cat(fileHandles)
    for line in fileLines:
        # Lines keep their trailing newline, so suppress print's own
        # to avoid double-spaced output.
        print(line, end="")
if __name__ == '__main__':
    # ArgumentParser's 'version' keyword was removed in Python 3;
    # expose the version through an explicit --version action instead.
    parser = argparse.ArgumentParser(
        description='Search globbed files line by line')
    parser.add_argument('--version', action='version',
                        version='%(prog)s 1.0')
    # nargs='?' makes the positionals optional so the defaults apply
    # when the arguments are omitted on the command line.
    parser.add_argument('regex', type=str, nargs='?', default='*',
                        help='Regular expression')
    parser.add_argument('searchDir', type=str, nargs='?', default='.',
                        help='list of input files')
    args = parser.parse_args()
    main(args.regex, args.searchDir)