[Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]

[PATCH 1/4] Add logpicker to utils



---
 utils/log_picker/Makefile.am  |   23 +++++
 utils/log_picker/__init__.py  |   96 +++++++++++++++++++++
 utils/log_picker/archiving.py |  117 +++++++++++++++++++++++++
 utils/log_picker/logmining.py |  191 +++++++++++++++++++++++++++++++++++++++++
 utils/log_picker/sending.py   |   80 +++++++++++++++++
 utils/logpicker               |  151 ++++++++++++++++++++++++++++++++
 6 files changed, 658 insertions(+), 0 deletions(-)
 create mode 100644 utils/log_picker/Makefile.am
 create mode 100644 utils/log_picker/__init__.py
 create mode 100644 utils/log_picker/archiving.py
 create mode 100644 utils/log_picker/logmining.py
 create mode 100644 utils/log_picker/sending.py
 create mode 100755 utils/logpicker

diff --git a/utils/log_picker/Makefile.am b/utils/log_picker/Makefile.am
new file mode 100644
index 0000000..b00ae64
--- /dev/null
+++ b/utils/log_picker/Makefile.am
@@ -0,0 +1,23 @@
+# log_picker/Makefile.am
+#
+# Copyright (C) 2010  Red Hat, Inc.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation; either version 2.1 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+
+pkgpyexecdir = $(pyexecdir)/log_picker
+logpickerdir = $(pkgpyexecdir)
+logpicker_PYTHON = *.py
+
+MAINTAINERCLEANFILES = Makefile.in
diff --git a/utils/log_picker/__init__.py b/utils/log_picker/__init__.py
new file mode 100644
index 0000000..e53c806
--- /dev/null
+++ b/utils/log_picker/__init__.py
@@ -0,0 +1,96 @@
+#!/usr/bin/python
+
+import os
+import sys
+import tempfile
+
+import archiving
+from archiving import ArchivationError
+from archiving import NoFilesArchivationError
+import sending
+from sending import SenderError
+import logmining
+from logmining import LogMinerError
+
+
class LogPickerError(Exception):
    """Raised by LogPicker on misconfiguration (missing archiver/sender)."""
    pass
+
+
class LogPicker(object):
    """Collects logs with miner objects, optionally archives them into one
    file, and hands the result to a sender object."""

    def __init__(self, archive_obj=None, sender_obj=None, miners=None):
        """
        @param archive_obj: archiver from log_picker.archiving (or None)
        @param sender_obj: sender from log_picker.sending (or None)
        @param miners: list of miner classes to run in getlogs()
        """
        self.sender_obj = sender_obj
        self.archive_obj = archive_obj
        # BUG FIX: the original used a mutable default argument (miners=[]),
        # which is shared between all instances; create a fresh list instead.
        self.miners = miners if miners is not None else []

        self.archive = None
        self.tmpdir = None
        self.files = []           # registered temp files, archived later
        self.filename = self._get_tmp_file("completelog")

    def _errprint(self, msg):
        """Write msg to stderr with a trailing newline."""
        sys.stderr.write('%s\n' % msg)

    def _get_tmp_file(self, name, suffix="", register=True):
        """Create a temp file and return its path.

        Registered files end up in the archive created by create_archive().
        The shared temp directory is created lazily on first use.
        """
        if not self.tmpdir:
            self.tmpdir = tempfile.mkdtemp(prefix="z-logs-", dir="/tmp")

        fd, filename = tempfile.mkstemp(suffix=suffix,
                                prefix="anaconda-%s-" % name, dir=self.tmpdir)
        # BUG FIX: close the descriptor returned by mkstemp; the original
        # discarded it and leaked one fd per temp file.
        os.close(fd)
        if register:
            self.files.append(filename)
        return filename

    def create_archive(self, name="logs"):
        """Pack all registered files into one archive.

        Raises LogPickerError when no archiver was passed; propagates
        ArchivationError/NoFilesArchivationError from the archiver.
        """
        if not self.archive_obj:
            raise LogPickerError("Object for archiving hasn't been passed.")

        self.archive = self._get_tmp_file(name,
                            suffix=self.archive_obj.file_ext, register=False)
        try:
            self.archive_obj.create_archive(self.archive, self.files)
        except (ArchivationError, NoFilesArchivationError):
            # Do not leave a useless empty archive file (or a stale path in
            # self.archive) behind when archiving failed.
            os.remove(self.archive)
            self.archive = None
            raise

    def send(self):
        """Send the archive (or the single registered file) via sender_obj.

        Raises LogPickerError on misconfiguration; SenderError from the
        sender is reported on stderr instead of propagating.
        """
        # BUG FIX: the original tested self.archive_obj here, so a missing
        # sender slipped through and crashed later with AttributeError.
        if not self.sender_obj:
            raise LogPickerError("Object for sending hasn't been passed.")

        if not self.archive and len(self.files) > 1:
            raise LogPickerError('You have more than one file to send. ' + \
                    'You have to create archive. Use create_archive() method.')

        if self.archive:
            fname = self.archive
            contenttype = self.archive_obj.mimetype
        elif self.files:
            fname = self.files[0]
            contenttype = "text/plain"
        else:
            # Robustness: previously this died with IndexError.
            raise LogPickerError('Nothing to send.')

        try:
            self.sender_obj.sendfile(fname, contenttype)
        except (SenderError) as e:
            self._errprint(e)

    def getlogs(self):
        """Run every miner and append its output to self.filename."""
        f = open(self.filename, 'w')
        try:
            for miner in self.miners:
                desc = "%s (%s)" % (miner._name, miner._description)
                f.write(desc + "\n")
                try:
                    miner(f).getlog()
                except (LogMinerError) as e:
                    # One failing miner must not abort the whole collection.
                    self._errprint("Warning: %s - %s" % (miner._name, e))
                    f.write("\n%s\n\n\n" % e)
        finally:
            f.close()
diff --git a/utils/log_picker/archiving.py b/utils/log_picker/archiving.py
new file mode 100644
index 0000000..753fc2b
--- /dev/null
+++ b/utils/log_picker/archiving.py
@@ -0,0 +1,117 @@
+import os
+import shutil
+import tempfile
+import tarfile
+import gzip
+
+
class ArchivationError(Exception):
    """Generic archive-creation failure."""
    pass


class NoFilesArchivationError(ArchivationError):
    """Raised when there is nothing (no files, or only empty files) to pack.

    Subclasses ArchivationError so callers can catch the whole archiving
    error family with one except clause; existing two-tuple excepts keep
    working unchanged.
    """
    pass
+
class ArchiveBaseClass(object):
    """Common base class for archive creators.

    Subclasses override the class attributes and create_archive();
    _create_tmp_tar() is a shared helper for tar-based formats.
    """

    _compression = False    # does the format compress its payload?
    _ext = ".ext"           # extension of the produced file
    _mimetype = ""          # MIME type of the produced file

    def __init__(self, *args, **kwargs):
        # Extra arguments are accepted (and ignored) for subclass convenience.
        self._tar_ext = ".tar"

    @property
    def support_compression(self):
        """True when the archive format compresses data."""
        return self._compression

    @property
    def file_ext(self):
        """File extension (with leading dot) of the produced archive."""
        return self._ext

    @property
    def mimetype(self):
        """MIME type of the produced archive."""
        return self._mimetype

    def _create_tmp_tar(self, filelist):
        """Tar all paths from filelist into a new temp file; return its path.

        Members are stored flat, under their base name only.
        """
        fd, tmpfile = tempfile.mkstemp(suffix=self._tar_ext)
        # BUG FIX: close the descriptor returned by mkstemp -- the original
        # discarded it and leaked one fd per archive.
        os.close(fd)
        tar = tarfile.open(tmpfile, "w")
        try:
            for name in filelist:
                tar.add(name, arcname=name.rsplit('/', 1)[-1])
        finally:
            # Close even when add() fails so the temp file is not left open.
            tar.close()
        return tmpfile

    def create_archive(self, outfilename, filelist):
        """Create an archive of filelist at outfilename (subclass hook)."""
        raise NotImplementedError()
+
+
class tarArchive(ArchiveBaseClass):
    """Plain (uncompressed) tar archiver."""

    _compression = False
    _ext = ".tar"
    _mimetype = "application/x-tar"

    def __init__(self, *args, **kwargs):
        # BUG FIX: forward the argument list properly; the original passed
        # the args tuple and kwargs dict as two positional arguments.
        ArchiveBaseClass.__init__(self, *args, **kwargs)

    def create_archive(self, outfilename, filelist):
        """Tar every path in filelist into outfilename; return outfilename.

        Raises NoFilesArchivationError when filelist is empty or contains
        only empty files.
        """
        if not filelist:
            raise NoFilesArchivationError("No files to archive.")

        # Refuse to create an archive that would contain only empty files.
        total_size = sum(os.path.getsize(fname) for fname in filelist)
        if total_size <= 0:
            raise NoFilesArchivationError("No files to archive.")

        tmptar = self._create_tmp_tar(filelist)
        shutil.move(tmptar, outfilename)
        return outfilename
+    
+
class gzipArchive(ArchiveBaseClass):
    """Gzip archiver; by default tars multiple files first (.tar.gz)."""

    _compression = True
    _ext = ".gz"
    _mimetype = "application/x-gzip"

    def __init__(self, usetar=True, *args, **kwargs):
        """
        @param usetar: tar the files before gzipping (required for >1 file)
        """
        # BUG FIX: forward arguments properly (see tarArchive.__init__).
        ArchiveBaseClass.__init__(self, *args, **kwargs)
        self.usetar = usetar

    @property
    def file_ext(self):
        """'.tar.gz' when tar is used, plain '.gz' otherwise."""
        if self.usetar:
            return "%s%s" % (self._tar_ext, self._ext)
        return self._ext

    def create_archive(self, outfilename, filelist):
        """Gzip filelist (tarring first when usetar) into outfilename.

        Raises NoFilesArchivationError for empty input and ArchivationError
        when asked to gzip several files without tar. Returns outfilename.
        """
        if not filelist:
            raise NoFilesArchivationError("No files to archive.")

        total_size = sum(os.path.getsize(fname) for fname in filelist)
        if total_size <= 0:
            raise NoFilesArchivationError("No files to archive.")

        if not self.usetar and len(filelist) > 1:
            raise ArchivationError("Gzip cannot archive multiple files without tar.")

        if self.usetar:
            f_in_path = self._create_tmp_tar(filelist)
        else:
            f_in_path = filelist[0]

        # Close both handles even on write errors (the original leaked them
        # when writelines() raised).
        f_in = open(f_in_path, 'rb')
        try:
            f_out = gzip.open(outfilename, 'wb')
            try:
                f_out.writelines(f_in)
            finally:
                f_out.close()
        finally:
            f_in.close()

        if self.usetar:
            # The intermediate tar is only a temp artifact.
            os.remove(f_in_path)

        return outfilename
+    
diff --git a/utils/log_picker/logmining.py b/utils/log_picker/logmining.py
new file mode 100644
index 0000000..61f300b
--- /dev/null
+++ b/utils/log_picker/logmining.py
@@ -0,0 +1,191 @@
+import os
+import shlex
+import time
+import subprocess
+
+
class LogMinerError(Exception):
    """Raised by miners when a log source cannot be collected."""
    pass
+
+
class LogMinerBaseClass(object):
    """Base class for log miners.

    A miner writes one section of diagnostic output into an open file
    object (self.logfile), either by copying existing files or by running
    commands and capturing their output.  Subclasses implement _action().
    """

    _name = "name"                   # short identifier shown in log headers
    _description = "Description"     # human readable description
    _prefer_separate_file = True     # hint: output belongs in its own file

    def __init__(self, logfile=None, *args, **kwargs):
        """
        @param logfile: open, writable file object receiving the output
        """
        self.logfile = logfile
        self._used = False    # True once something was written (separators)

    def _write_separator(self):
        """Separate two chunks of output inside the same logfile."""
        self.logfile.write('\n\n')

    def _get_file(self, file):
        """Append the content of a single file to the logfile."""
        # BUG FIX: the original called the non-existent self.get_files();
        # the actual helper is named _get_files.
        self._get_files([file])

    def _get_files(self, files):
        """Append the content of every file in 'files' to the logfile.

        Unreadable files are reported inside the log instead of aborting.
        """
        if self._used:
            self._write_separator()
        self._used = True

        for filename in files:
            self.logfile.write('%s:\n' % filename)
            try:
                f = open(filename, 'r')
            except (IOError) as e:
                self.logfile.write("Exception while opening: %s\n" % e)
                continue

            self.logfile.writelines(f)
            f.close()

    def _run_command(self, command):
        """Run 'command' (string or argv list); log stdout, stderr and rc."""
        if self._used:
            self._write_separator()
        self._used = True

        # basestring keeps unicode command strings working on Python 2.
        if isinstance(command, basestring):
            command = shlex.split(command)

        self.logfile.write('"%s" execution:\n' % (' '.join(command)))
        proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        (out, err) = proc.communicate()
        self.logfile.write('STDOUT:\n%s\n' % out)
        self.logfile.write('STDERR:\n%s\n' % err)
        self.logfile.write('RETURN CODE: %s\n' % proc.returncode)

    def getlog(self):
        """Public entry point: run the miner, then terminate the section."""
        self._action()
        self._write_separator()

    def _action(self):
        """The mining itself -- implemented by subclasses."""
        raise NotImplementedError()
+
+
+
class AnacondaLogMiner(LogMinerBaseClass):
    """Makes Anaconda dump its state (SIGUSR2) and grabs the dump file."""

    _name = "anaconda_log"
    _description = "Log dumped from Anaconda."
    _prefer_separate_file = True

    def _action(self):
        """Signal Anaconda, wait for the dump, copy it into the logfile.

        Raises LogMinerError when the signal fails or no dump appears.
        """
        # Snapshot /tmp so the newly created traceback file can be detected.
        old_state = set(os.listdir('/tmp'))

        # SIGUSR2 tells Anaconda to dump itself into /tmp/anaconda-tb-*.
        proc = subprocess.Popen(shlex.split("killall -s SIGUSR2 anaconda"),
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        proc.communicate()
        if proc.returncode:
            raise LogMinerError('Error while sending signal to Anaconda')

        # Give Anaconda time to write the dump.  NOTE(review): a fixed 5 s
        # sleep is racy on slow machines -- consider polling for the file.
        time.sleep(5)

        # Look for the new anaconda-tb-* file among entries created since
        # the snapshot; for/else raises when none is found (this also
        # covers the empty-set case the original checked separately).
        new_state = set(os.listdir('/tmp'))
        tbpfiles = new_state - old_state

        for candidate in tbpfiles:
            if candidate.startswith('anaconda-tb-'):
                tbpfile_name = candidate
                break
        else:
            raise LogMinerError('Error: No anaconda traceback file exist')

        # Copy the traceback log into our logfile.
        self._get_file('/tmp/%s' % tbpfile_name)
+
+
+
class FileSystemLogMiner(LogMinerBaseClass):
    """Writes a tree listing of the filesystem (names, sizes) into the log."""

    _name = "filesystem"
    _description = "Create image of disc structure."
    _prefer_separate_file = True

    FSTREE_FORMAT = "[%5s%1s] %s"   # [ size unit ] name
    DADPOINT = 1                    # Number of Digits After the Decimal POINT

    def _action(self):
        self._get_tree_structure()

    def _size_conversion(self, size):
        """Convert a byte count to (value, unit); unit is '', 'K', 'M' or 'G'."""
        if size > 1073741824:       # > 1 GiB
            return round(size / 1073741824.0, self.DADPOINT), "G"
        if size > 1048576:          # > 1 MiB
            return round(size / 1048576.0, self.DADPOINT), "M"
        if size > 1024:             # > 1 KiB
            return round(size / 1024.0, self.DADPOINT), "K"
        return size, ""

    def _get_tree_structure(self, human_readable=True, root='/'):
        """Write a recursive listing of 'root' into the logfile.

        @param human_readable: convert sizes to K/M/G units
        @param root: directory to start from (generalized from the
                     hard-coded '/'; default keeps the old behavior)
        """
        # Mount points we still descend into despite being mounted.
        white_list = ['/sys', ]

        logfile = self.logfile

        for path, dirs, files in os.walk(root):
            logfile.write('\n%s:\n' % path)

            # List sub-directories with their directory-entry sizes.
            for entry in dirs:
                fullpath = os.path.join(path, entry)
                size = os.path.getsize(fullpath)
                unit = ""
                if human_readable:
                    size, unit = self._size_conversion(size)
                logfile.write('%s\n' % (self.FSTREE_FORMAT % (size, unit, entry)))

            # Prune mounted sub-directories (except white-listed ones) so
            # the walk stays on this filesystem; iterate a copy while
            # mutating dirs in place (required by os.walk).
            for entry in dirs[:]:
                dirpath = os.path.join(path, entry)
                if os.path.ismount(dirpath) and dirpath not in white_list:
                    dirs.remove(entry)

            # List files; symlinks show their target instead of a size.
            for filename in files:
                fullpath = os.path.join(path, filename)
                if os.path.islink(fullpath):
                    line = self.FSTREE_FORMAT % ("", "", filename)
                    line += " -> %s" % os.path.realpath(fullpath)
                    if not os.path.isfile(fullpath):
                        # Broken symlink
                        line += " (Broken)"
                else:
                    size = os.path.getsize(fullpath)
                    unit = ""
                    if human_readable:
                        size, unit = self._size_conversion(size)
                    line = self.FSTREE_FORMAT % (size, unit, filename)
                logfile.write('%s\n' % line)
+
+
+
class DmSetupLogMiner(LogMinerBaseClass):
    """Collects device-mapper state via the dmsetup utility."""

    _name = "dmsetup"
    _description = "Get output from dmsetup."
    _prefer_separate_file = True

    def _action(self):
        # Device tree plus the compact info table; output is captured into
        # self.logfile by LogMinerBaseClass._run_command.
        self._run_command("dmsetup ls --tree")
        self._run_command("dmsetup info -c")
+
diff --git a/utils/log_picker/sending.py b/utils/log_picker/sending.py
new file mode 100644
index 0000000..a341d96
--- /dev/null
+++ b/utils/log_picker/sending.py
@@ -0,0 +1,80 @@
+import os
+import datetime
+from socket import gethostname
+from report.plugins.bugzilla import filer
+from report.plugins.bugzilla.filer import CommunicationError
+from report.plugins.bugzilla.filer import LoginError
+
+
class SenderError(Exception):
    """Raised when connecting to the report target or uploading fails."""
    pass
+
+
class SenderBaseClass(object):
    """Abstract base class for objects that deliver a log file somewhere."""

    def __init__(self, *args, **kwargs):
        # Extra arguments are accepted (and ignored) for subclass convenience.
        pass

    def sendfile(self, file, contenttype):
        """Send 'file' with MIME type 'contenttype' (subclass hook)."""
        raise NotImplementedError()
+    
+
class BugzillaBaseClass(SenderBaseClass):
    """Attaches log files to an existing bugzilla bug via report's filer."""

    _bz_address = ""       # bugzilla web address
    _bz_xmlrpc = ""        # bugzilla XML-RPC endpoint
    _description = ""      # description prefix used for attachments

    def __init__(self, *args, **kwargs):
        # BUG FIX: forward the argument list properly; the original passed
        # the args tuple and kwargs dict as two positional arguments.
        SenderBaseClass.__init__(self, *args, **kwargs)
        self.bzfiler = None
        self.bug_id = None
        self.comment = None

    def connect_and_login(self, username, password):
        """Open an XML-RPC session and log in.

        Raises SenderError on communication or authentication failure.
        """
        try:
            self.bzfiler = filer.BugzillaFiler(self._bz_xmlrpc, self._bz_address,
                                        filer.getVersion(), filer.getProduct())
            self.bzfiler.login(username, password)
        except (CommunicationError, LoginError) as e:
            raise SenderError("%s. Bad password?" % e)

    def set_bug(self, bug_id):
        """Select the bug the attachment goes to."""
        self.bug_id = bug_id

    def set_comment(self, comment):
        """Optional comment posted together with the attachment."""
        self.comment = comment

    def sendfile(self, file, contenttype):
        """Attach 'file' to the configured bug.

        Raises SenderError when not logged in, when no bug is set, or when
        the upload itself fails.
        """
        if not self.bzfiler:
            raise SenderError('No connection to bugzilla')
        if not self.bug_id:
            raise SenderError('No bug number is set')

        # Description: "<prefix> (<host>) <YYYY-MM-DD>"
        hostname = gethostname()
        date_str = datetime.datetime.now().strftime("%Y-%m-%d")
        description = "%s (%s) %s" % (self._description, hostname, date_str)

        dict_args = {'isprivate': False,
                     'filename': os.path.basename(file),
                     'contenttype': contenttype}

        if self.comment:
            dict_args['comment'] = self.comment

        try:
            bug = self.bzfiler.getbug(self.bug_id)
            bug.attachfile(file, description, **dict_args)
        except (CommunicationError, ValueError) as e:
            raise SenderError(e)
+
+
class RedHatBugzilla(BugzillaBaseClass):
    """BugzillaBaseClass preconfigured for the Red Hat bugzilla instance."""

    # BUG FIX: stray trailing semicolons after the string literals removed
    # (harmless no-op statements, most likely mail-archive escape artifacts).
    _bz_address = "http://bugzilla.redhat.com"
    _bz_xmlrpc = "https://bugzilla.redhat.com/xmlrpc.cgi"
    _description = "LogPicker"

    def __init__(self, *args, **kwargs):
        # BUG FIX: forward the argument list properly (see BugzillaBaseClass).
        BugzillaBaseClass.__init__(self, *args, **kwargs)
+    
diff --git a/utils/logpicker b/utils/logpicker
new file mode 100755
index 0000000..f99e951
--- /dev/null
+++ b/utils/logpicker
@@ -0,0 +1,151 @@
+#!/usr/bin/python
+
+import sys
+import getpass
+import optparse
+import log_picker
+
+import log_picker.archiving as archiving
+from log_picker.archiving import ArchivationError
+from log_picker.archiving import NoFilesArchivationError
+import log_picker.sending as sending
+from log_picker.sending import SenderError
+import log_picker.logmining as logmining
+from log_picker.logmining import LogMinerError
+
+
# Miners offered by the command line interface (see ApplicationScope).
MINERS = [logmining.AnacondaLogMiner,
          logmining.FileSystemLogMiner,
          logmining.DmSetupLogMiner,
         ]

# Sender constants
RHBZ = 0  # RedHat Bugzilla

# Archivator constants
TAR = 0
GZIP = 1
+
+
class ApplicationScope(object):
    """Application configuration class.

    Holds everything Injector needs: sender and archiver selection,
    bugzilla credentials and the list of miners to run.
    """

    def __init__(self, parser_options=None):
        """
        @param parser_options: optparse Values object from parse_args()
                               (may be None)
        """
        # BUG FIX: the default used to be a shared mutable {} whose
        # attribute access crashed with AttributeError; getattr with a
        # fallback makes missing options degrade to their empty defaults.
        # sender
        self.sender = ""
        self.bug_id = getattr(parser_options, "bug_id", None) or ""
        self.bug_comment = getattr(parser_options, "bug_comment", None) or ""
        self.bz_login = getattr(parser_options, "bz_login", None) or ""
        self.bz_password = ""

        # archivator
        self.archivator = ""

        # miners: the anaconda log is always collected, the rest is opt-in
        if getattr(parser_options, "l_all", False):
            self.miners = list(MINERS)
        else:
            self.miners = [logmining.AnacondaLogMiner]
            if getattr(parser_options, "l_filesystem", False):
                self.miners.append(logmining.FileSystemLogMiner)
            if getattr(parser_options, "l_dmsetup", False):
                self.miners.append(logmining.DmSetupLogMiner)
+
+
class Injector(object):
    """Main factory class -- wires ApplicationScope settings into objects."""

    @staticmethod
    def injectMainHelper(scope):
        """Build the fully wired MainHelper for this scope."""
        return MainHelper(Injector.injectLogPicker(scope))

    @staticmethod
    def injectLogPicker(scope):
        """Create a LogPicker with the configured sender, archiver, miners."""
        # The sender is built (and logged in) first, as before.
        sender = Injector.injectSender(scope)
        archivator = Injector.injectArchivator(scope)
        return log_picker.LogPicker(archive_obj=archivator,
                                    sender_obj=sender,
                                    miners=scope.miners)

    @staticmethod
    def injectSender(scope):
        """Create, configure and log in the sender selected by scope.sender."""
        if scope.sender != RHBZ:
            raise Exception("Unknown sender type.")

        sender = sending.RedHatBugzilla()
        sender.set_bug(scope.bug_id)
        sender.set_comment(scope.bug_comment)
        sender.connect_and_login(scope.bz_login, scope.bz_password)
        return sender

    @staticmethod
    def injectArchivator(scope):
        """Create the archiver selected by scope.archivator."""
        if scope.archivator == TAR:
            return archiving.tarArchive()
        if scope.archivator == GZIP:
            return archiving.gzipArchive()

        raise Exception("Unknown archivator type.")
+    
+
class MainHelper(object):
    """Main application class: drives gather -> archive -> send."""

    def __init__(self, logpicker):
        # logpicker: fully wired log_picker.LogPicker instance
        self.picker = logpicker

    def run(self):
        """Gather the logs, archive them and send the result."""
        self.picker.getlogs()

        try:
            self.picker.create_archive()
        except (NoFilesArchivationError) as e:
            # Nothing was mined -- not a fatal error, just tell the user.
            print "Nothing to report. Select more log gathering options."
            return

        self.picker.send()
+
+
if __name__ == "__main__":

    # Command line parsing: bug/report options, plus a group selecting
    # which miners (beyond the always-on anaconda log) should run.
    parser = optparse.OptionParser()
    parser.add_option("-b", "--bug", dest="bug_id",
                      help="Bug id in bugzilla.", metavar="BUGID")
    parser.add_option("-c", "--comment", dest="bug_comment", default=None,
                      help="Report comment.", metavar="COMMENT")
    parser.add_option("-l", "--login", dest="bz_login",
                      help="Bugzilla username.", metavar="USERNAME")

    group = optparse.OptionGroup(parser, "Log gathering options")
    group.add_option("-A", "--all", dest="l_all", action="store_true",
                      help="Gather all logs.")
    group.add_option("-f", "--filesystem", dest="l_filesystem", action="store_true",
                      help="Filesystem structure log.")
    group.add_option("-d", "--dmsetup", dest="l_dmsetup", action="store_true",
                      help="Low level logical volume (dmsetup) log.")
    parser.add_option_group(group)

    (options, args) = parser.parse_args()

    # Application scope -- hard-wired to gzip archiving and Red Hat bugzilla.
    scope = ApplicationScope(parser_options=options)
    scope.archivator = GZIP
    scope.sender = RHBZ

    # Interactively ask for anything not supplied on the command line.
    if not scope.bz_login:
        scope.bz_login = raw_input("Bugzilla username: ")
    scope.bz_password = getpass.getpass("Bugzilla password: ")
    if not scope.bug_id:
        scope.bug_id = raw_input("Bug id in bugzilla: ")

    # Wiring errors (unknown sender/archiver, failed bugzilla login) are
    # reported and abort with exit status 1.
    try:
        app = Injector.injectMainHelper(scope)
    except Exception as e:
        print "Error: %s" % e
        sys.exit(1)

    app.run()
    print "Successfully completed!"
+    
-- 
1.7.2.2


[Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]