""" (c) 1999 Luc Stepniewski <lstep@mail.dotcom.fr>

   Changelog:
       - 19990822: Project creation. Added code to manage the case where
       	           the monitored file is totally wiped.
   ToDo:
       - Known limitation: we cannot detect that a file has been erased
       and immediately recreated with a size equal to or greater than
       the old one (the (size, mtime) snapshot may not change).
"""
import string,os,time
from stat import *

def chomp(s):
    """Remove a single trailing newline from *s*, if present (Perl-style chomp)."""
    if s.endswith('\n'):
        return s[:-1]
    return s

#############

class Tailer:
    """Watch a file for appended data (like ``tail -f``) and feed each
    batch of new lines to a user-supplied analyzer callback.

    Usage: construct with a filename, call set_analyzer() with a
    function taking one argument (a list of cleaned lines), then call
    watch(), which polls forever.
    """

    def __init__(self, filename, delay=None):
        # Polling interval in seconds.  `or` keeps the historical
        # behavior: None (and 0) fall back to the 2-second default.
        self.delay = delay or 2
        self.filename = filename
        self.analyze_function = None

    def watch(self):
        """Poll the file forever, reporting newly appended data.

        Blocks until the file first exists, then loops comparing
        (size, mtime) snapshots to detect changes.  Never returns.
        """
        # Wait until the file exists so we have a baseline snapshot.
        # (Only sleep when the file is not there yet -- the original
        # version slept once even when the file existed immediately.)
        oldstat = self.getstat()
        while oldstat is None:
            time.sleep(self.delay)
            oldstat = self.getstat()

        while 1:
            time.sleep(self.delay)
            try:
                newstat = self.getstat()
                if newstat is None:
                    # The file has been erased.  Pretend the previous
                    # snapshot was an empty file so any reappearance is
                    # detected as a change.  (The original used a
                    # 3-tuple here; getstat returns a 2-tuple.)
                    oldstat = (0, 0)
                elif newstat != oldstat:
                    # The file changed.  If it shrank, it was truncated
                    # or rewritten (e.g. with '>'), so re-read from the
                    # start; otherwise read only the appended tail.
                    if newstat[0] < oldstat[0]:
                        self.report(0)
                    else:
                        self.report(oldstat[0])
                    oldstat = newstat
            except (IOError, OSError):
                # The file may vanish between stat and open; stay in
                # best-effort mode and keep polling.  (Narrowed from the
                # original bare `except:` so real bugs are not hidden.)
                pass

    def getstat(self):
        """Return (size, mtime) for the watched file, or None if it
        cannot be stat'ed (typically: it does not exist)."""
        try:
            st = os.stat(self.filename)
        except OSError:
            return None
        return st[ST_SIZE], st[ST_MTIME]

    def set_analyzer(self, function_to_use):
        """Register the callback that will receive each batch of lines."""
        self.analyze_function = function_to_use

    def report(self, num):
        """Read the file from byte offset *num* to EOF, clean each line
        with fast_clean(), and hand the resulting list to the analyzer.

        Blank lines appear as None entries (see fast_clean).
        """
        f = open(self.filename, 'r')
        try:
            f.seek(num)
            # We could read the file element by element; for now we
            # read everything at once.  (Translated from the original
            # French comment.)
            all_lines = f.readlines()
        finally:
            # Always release the handle, even if readlines() raises.
            f.close()

        # List comprehension instead of map(): guarantees a list on
        # both Python 2 and 3, and replaces the removed apply().
        cleaned = [fast_clean(line) for line in all_lines]
        if self.analyze_function is not None:
            self.analyze_function(cleaned)


def fast_clean(line):
    """Strip surrounding whitespace from a raw log line.

    Returns the trimmed line, or None when the line is blank after
    trimming (callers keep these None placeholders in the list).
    """
    # str.strip() already removes the trailing newline, so the old
    # separate chomp() pass was redundant; it also replaces
    # string.strip(line), which no longer exists in Python 3.
    cleaned = line.strip()
    if cleaned == '':
        return None
    return cleaned

def analyze(lines):
    """Default analyzer: print each reported line wrapped in brackets.

    *lines* may contain None entries (blank lines nulled out by
    fast_clean); they are printed as "[None]", matching the historical
    output.
    """
    # print(...) with a single argument behaves identically on
    # Python 2 and 3; the original tab-indented print statement was a
    # TabError/SyntaxError under Python 3.
    for line in lines:
        print("[%s]" % line)

if __name__ == '__main__':
    # Demo: tail the syslog and dump every new line to stdout.
    watcher = Tailer('/var/log/messages')
    watcher.set_analyzer(analyze)
    watcher.watch()