Kaynağa Gözat

added warnings to report plugin
moved the intelligence of the limit module into its own plugin to be able to use it headless
added warnings as doxygen lists and added a warnings count to tables

prozessorkern 5 yıl önce
ebeveyn
işleme
d35b7f811a

+ 1 - 1
ext/std/tools/limit.ini

@@ -10,6 +10,6 @@ version: 1.0
 package: std.tools
 module:  limit
 class:   Plugin
-depends: mpp.dbf
+depends: std.tools.limit_backend
 actions: limit
 enabled: True

+ 5 - 301
ext/std/tools/limit.py

@@ -12,310 +12,14 @@ import mpp.api
 import mpp.utils
 import mpp.cout
 
-class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
-    
-    MODE_NEW     = 0x01
-    MODE_TREND   = 0x03
-    MODE_TOUCHED = 0x07
-    MODE_ALL     = 0x15
-
-    def declare_configuration(self, parser):
-        self.parser = parser
-        parser.add_option("--hotspots", "--hs", default=None, help="If not set (none), all exceeded limits are printed."
-                          " If set, exceeded limits are sorted (the worst is the first) and only first HOTSPOTS limits are printed."
-                          " [default: %default]", type=int)
-        parser.add_option("--disable-suppressions", "--ds", action="store_true", default=False,
-                          help = "If not set (none), all suppressions are ignored"
-                                 " and associated warnings are printed. [default: %default]")
-        parser.add_option("--warn-mode", "--wm", default='all', choices=['new', 'trend', 'touched', 'all'],
-                         help="Defines the warnings mode. "
-                         "'all' - all warnings active, "
-                         "'new' - warnings for new regions/files only, "
-                         "'trend' - warnings for new regions/files and for bad trend of modified regions/files, "
-                         "'touched' - warnings for new and modified regions/files "
-                         "[default: %default]")
-        parser.add_option("--min-limit", "--min", action="multiopt",
-                          help="A threshold per 'namespace:field' metric in order to select regions, "
-                          "which have got metric value less than the specified limit. "
-                          "This option can be specified multiple times, if it is necessary to apply several limits. "
-                          "Should be in the format: <namespace>:<field>:<limit-value>[:region_type[,region_type]], for example: "
-                          "'std.code.lines:comments:1', or 'std.code.lines:comments:1:function,class'. "
-                          "Region types is optional specifier, and if not defined the limit is applied to regions of all types.")
-        parser.add_option("--max-limit", "--max", action="multiopt",
-                          help="A threshold per 'namespace:field' metric in order to select regions, "
-                          "which have got metric value more than the specified limit. "
-                          "This option can be specified multiple times, if it is necessary to apply several limits. "
-                          "Should be in the format: <namespace>:<field>:<limit-value>[:region_type[,region_type]], for example: "
-                          "'std.code.complexity:cyclomatic:7', or 'std.code.complexity:maxdepth:5:function'. "
-                          "Region types is optional specifier, and if not defined the limit is applied to regions of all types.")
-    
-    def configure(self, options):
-        self.hotspots = options.__dict__['hotspots']
-        self.no_suppress = options.__dict__['disable_suppressions']
-
-        if options.__dict__['warn_mode'] == 'new':
-            self.mode = self.MODE_NEW
-        elif options.__dict__['warn_mode'] == 'trend':
-            self.mode = self.MODE_TREND
-        elif options.__dict__['warn_mode'] == 'touched':
-            self.mode = self.MODE_TOUCHED
-        elif options.__dict__['warn_mode'] == 'all':
-            self.mode = self.MODE_ALL
-            
-        if self.mode != self.MODE_ALL and options.__dict__['db_file_prev'] == None:
-            self.parser.error("option --warn-mode: The mode '" + options.__dict__['warn_mode'] + "' requires '--db-file-prev' option set")
-
-        class Limit(object):
-            def __init__(self, limit_type, limit, namespace, field, db_filter, region_types, original):
-                self.type = limit_type
-                self.limit = limit
-                self.namespace = namespace
-                self.field = field
-                self.filter = db_filter
-                self.region_types = region_types
-                self.original = original
-                
-            def __repr__(self):
-                return "'{0}:{1}' {2} {3} [applied to '{4}' region type(s)]".format(
-                        self.namespace, self.field, self.filter[1], self.limit,
-                        mpp.api.Region.T().to_str(self.region_types))
-        
-        self.limits = []
-        pattern = re.compile(r'''([^:]+)[:]([^:]+)[:]([-+]?[0-9]+(?:[.][0-9]+)?)(?:[:](.+))?''')
-        if options.__dict__['max_limit'] != None:
-            for each in options.__dict__['max_limit']:
-                match = re.match(pattern, each)
-                if match == None:
-                    self.parser.error("option --max-limit: Invalid format: " + each)
-                region_types = 0x00
-                if match.group(4) != None:
-                    for region_type in match.group(4).split(','):
-                        region_type = region_type.strip()
-                        group_id = mpp.api.Region.T().from_str(region_type)
-                        if group_id == None:
-                            self.parser.error(
-                                    "option --max-limit: uknown region type (allowed: global, class, struct, namespace, function, interface, any): " + region_type)
-                        region_types |= group_id
-                else:
-                    region_types = mpp.api.Region.T().ANY
-                limit = Limit("max", float(match.group(3)), match.group(1), match.group(2),
-                        (match.group(2), '>', float(match.group(3))), region_types, each)
-                self.limits.append(limit)
-        if options.__dict__['min_limit'] != None:
-            for each in options.__dict__['min_limit']:  
-                match = re.match(pattern, each)
-                if match == None:
-                    self.parser.error("option --min-limit: Invalid format: " + each)
-                region_types = 0x00
-                if match.group(4) != None:
-                    for region_type in match.group(4).split(','):
-                        region_type = region_type.strip()
-                        group_id = mpp.api.Region.T().from_str(region_type)
-                        if group_id == None:
-                            self.parser.error(
-                                    "option --max-limit: uknown region type (allowed: global, class, struct, namespace, function, interface, any): " + region_type)
-                        region_types |= group_id
-                else:
-                    region_types = mpp.api.Region.T().ANY
-                limit = Limit("min", float(match.group(3)), match.group(1), match.group(2),
-                        (match.group(2), '<', float(match.group(3))), region_types, each)
-                self.limits.append(limit)
-
-    def initialize(self):
-        super(Plugin, self).initialize()
-        loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
-        loader = self.get_plugin('mpp.dbf').get_loader()
-
-        self._verify_namespaces(loader.iterate_namespace_names())
-        for each in loader.iterate_namespace_names():
-            self._verify_fields(each, loader.get_namespace(each).iterate_field_names())
-
-        # Try to optimise iterative change scans
-        self.modified_file_ids = None
-        if self.mode != self.MODE_ALL:
-            self.modified_file_ids = self._get_list_of_modified_files(loader, loader_prev)
-    
-    def _verify_namespaces(self, valid_namespaces):
-        valid = []
-        for each in valid_namespaces:
-            valid.append(each)
-        for each in self.limits:
-            if each.namespace not in valid:
-                self.parser.error("option --{0}-limit: metric '{1}:{2}' is not available in the database file.".
-                                  format(each.type, each.namespace, each.field))
-
-    def _verify_fields(self, namespace, valid_fields):
-        valid = []
-        for each in valid_fields:
-            valid.append(each)
-        for each in self.limits:
-            if each.namespace == namespace:
-                if each.field not in valid:
-                    self.parser.error("option --{0}-limit: metric '{1}:{2}' is not available in the database file.".
-                                      format(each.type, each.namespace, each.field))
-                            
-    def _get_list_of_modified_files(self, loader, loader_prev):
-        logging.info("Identifying changed files...")
-        
-        old_files_map = {}
-        for each in loader_prev.iterate_file_data():
-            old_files_map[each.get_path()] = each.get_checksum()
-        if len(old_files_map) == 0:
-            return None
-        
-        modified_file_ids = []
-        for each in loader.iterate_file_data():
-            if len(modified_file_ids) > 1000: # If more than 1000 files changed, skip optimisation
-                return None
-            if (each.get_path() not in list(old_files_map.keys())) or old_files_map[each.get_path()] != each.get_checksum():
-                modified_file_ids.append(str(each.get_id()))
-
-        old_files_map = None
-                
-        if len(modified_file_ids) != 0:
-            modified_file_ids = " , ".join(modified_file_ids)
-            modified_file_ids = "(" + modified_file_ids + ")"
-            return modified_file_ids
-        
-        return None
-
-    def _is_metric_suppressed(self, metric_namespace, metric_field, loader, select_data):
-        data = loader.load_file_data(select_data.get_path())
-        if select_data.get_region() != None:
-            data = data.get_region(select_data.get_region().get_id())
-            sup_data = data.get_data('std.suppress', 'list')
-        else:
-            sup_data = data.get_data('std.suppress.file', 'list')
-        if sup_data != None and sup_data.find('[' + metric_namespace + ':' + metric_field + ']') != -1:
-            return True
-        return False
-
-                    
-    def iterate_limits(self):
-        for each in self.limits:
-            yield each   
-
-    def is_mode_matched(self, limit, value, diff, is_modified):
-        if is_modified == None:
-            # means new region, True in all modes
-            return True
-        if self.mode == self.MODE_ALL:
-            return True 
-        if self.mode == self.MODE_TOUCHED and is_modified == True:
-            return True 
-        if self.mode == self.MODE_TREND and is_modified == True:
-            if limit < value and diff > 0:
-                return True
-            if limit > value and diff < 0:
-                return True
-        return False
-
-    def get_warnings(self, path, limit):
-
-        class Warning (object):
-        
-            def __init__(self, path, cursor, namespace, field, region_name,
-                            stat_level, trend_value, stat_limit,
-                            is_modified, is_suppressed):
-                self.path = path
-                self.cursor = cursor
-                self.namespace = namespace
-                self.field = field
-                self.region_name = region_name
-                self.stat_level = stat_level
-                self.trend_value = trend_value
-                self.stat_limit = stat_limit
-                self.is_modified = is_modified
-                self.is_suppressed = is_suppressed
-
-        loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
-        loader = self.get_plugin('mpp.dbf').get_loader()
-
-        warnings = []
-
-        filters = [limit.filter]
-        if self.modified_file_ids != None:
-            filters.append(('file_id', 'IN', self.modified_file_ids))
-        sort_by = None
-        limit_by = None
-        limit_warnings = None
-        if self.hotspots != None:
-            sort_by = limit.field
-            if limit.type == "max":
-                sort_by = "-" + sort_by
-            if self.mode == self.MODE_ALL:
-                # if it is not ALL mode, the tool counts number of printed warnings below
-                limit_by = self.hotspots
-            limit_warnings = self.hotspots
-        selected_data = loader.load_selected_data(limit.namespace,
-                                                fields = [limit.field],
-                                                path=path,
-                                                filters = filters,
-                                                sort_by=sort_by,
-                                                limit_by=limit_by)
-        if selected_data == None:
-            mpp.utils.report_bad_path(path)
-            return None
-        
-        for select_data in selected_data:
-            if limit_warnings != None and limit_warnings <= 0:
-                break
-            
-            is_modified = None
-            diff = None
-            file_data = loader.load_file_data(select_data.get_path())
-            file_data_prev = loader_prev.load_file_data(select_data.get_path())
-            if file_data_prev != None:
-                if file_data.get_checksum() == file_data_prev.get_checksum():
-                    diff = 0
-                    is_modified = False
-                else:
-                    matcher = mpp.utils.FileRegionsMatcher(file_data, file_data_prev)
-                    prev_id = matcher.get_prev_id(select_data.get_region().get_id())
-                    if matcher.is_matched(select_data.get_region().get_id()):
-                        if matcher.is_modified(select_data.get_region().get_id()):
-                            is_modified = True
-                        else:
-                            is_modified = False
-                        diff = mpp.api.DiffData(select_data,
-                                                        file_data_prev.get_region(prev_id)).get_data(limit.namespace, limit.field)
-
-            if (self.is_mode_matched(limit.limit,
-                                            select_data.get_data(limit.namespace, limit.field),
-                                            diff,
-                                            is_modified) == False):
-                continue
-            
-            is_sup = self._is_metric_suppressed(limit.namespace, limit.field, loader, select_data)
-            if is_sup == True and self.no_suppress == False:
-                continue    
-            
-            region_cursor = 0
-            region_name = None
-            if select_data.get_region() != None:
-                if select_data.get_region().get_type() & limit.region_types == 0:
-                    continue
-                region_cursor = select_data.get_region().cursor
-                region_name = select_data.get_region().name
-            warnings.append(Warning(select_data.get_path(),
-                                region_cursor,
-                                limit.namespace,
-                                limit.field,
-                                region_name,
-                                select_data.get_data(limit.namespace, limit.field),
-                                diff,
-                                limit.limit,
-                                is_modified,
-                                is_sup))
-            if limit_warnings != None:
-                    limit_warnings -= 1
-        
-        return warnings
+class Plugin(mpp.api.Plugin, mpp.api.IRunable):
 
     def print_warnings(self, args):
         exit_code = 0
         warnings = []
 
+        limit_backend = self.get_plugin('std.tools.limit_backend')
+
         paths = None
         if len(args) == 0:
             paths = [""]
@@ -325,11 +29,11 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
         for path in paths:
             path = mpp.utils.preprocess_path(path)
             
-            for limit in self.iterate_limits():
+            for limit in limit_backend.iterate_limits():
                 warns_count = 0
                 logging.info("Applying limit: " + str(limit))
 
-                warnings = self.get_warnings(path, limit)
+                warnings = limit_backend.get_warnings(path, limit)
                 if warnings == None:
                     exit_code += 1
                 else:

+ 15 - 0
ext/std/tools/limit_backend.ini

@@ -0,0 +1,15 @@
+;
+;    Metrix++, Copyright 2009-2019, Metrix++ Project
+;    Link: https://github.com/metrixplusplus/metrixplusplus
+;    
+;    This file is a part of Metrix++ Tool.
+;    
+
+[Plugin]
+version: 1.0
+package: std.tools
+module:  limit_backend
+class:   Plugin
+depends: mpp.dbf
+actions:
+enabled: True

+ 323 - 0
ext/std/tools/limit_backend.py

@@ -0,0 +1,323 @@
+#
+#    Metrix++, Copyright 2009-2019, Metrix++ Project
+#    Link: https://github.com/metrixplusplus/metrixplusplus
+#    
+#    This file is a part of Metrix++ Tool.
+#    
+
+import logging
+import re
+
+import mpp.api
+import mpp.utils
+import mpp.cout
+
+class Plugin(mpp.api.Plugin, mpp.api.IConfigurable):
+    
+    MODE_NEW     = 0x01
+    MODE_TREND   = 0x03
+    MODE_TOUCHED = 0x07
+    MODE_ALL     = 0x15
+
+    def declare_configuration(self, parser):
+        self.parser = parser
+        parser.add_option("--hotspots", "--hs", default=None, help="If not set (none), all exceeded limits are printed."
+                          " If set, exceeded limits are sorted (the worst is the first) and only first HOTSPOTS limits are printed."
+                          " [default: %default]", type=int)
+        parser.add_option("--disable-suppressions", "--ds", action="store_true", default=False,
+                          help = "If not set (none), all suppressions are ignored"
+                                 " and associated warnings are printed. [default: %default]")
+        parser.add_option("--warn-mode", "--wm", default='all', choices=['new', 'trend', 'touched', 'all'],
+                         help="Defines the warnings mode. "
+                         "'all' - all warnings active, "
+                         "'new' - warnings for new regions/files only, "
+                         "'trend' - warnings for new regions/files and for bad trend of modified regions/files, "
+                         "'touched' - warnings for new and modified regions/files "
+                         "[default: %default]")
+        parser.add_option("--min-limit", "--min", action="multiopt",
+                          help="A threshold per 'namespace:field' metric in order to select regions, "
+                          "which have got metric value less than the specified limit. "
+                          "This option can be specified multiple times, if it is necessary to apply several limits. "
+                          "Should be in the format: <namespace>:<field>:<limit-value>[:region_type[,region_type]], for example: "
+                          "'std.code.lines:comments:1', or 'std.code.lines:comments:1:function,class'. "
+                          "Region types is optional specifier, and if not defined the limit is applied to regions of all types.")
+        parser.add_option("--max-limit", "--max", action="multiopt",
+                          help="A threshold per 'namespace:field' metric in order to select regions, "
+                          "which have got metric value more than the specified limit. "
+                          "This option can be specified multiple times, if it is necessary to apply several limits. "
+                          "Should be in the format: <namespace>:<field>:<limit-value>[:region_type[,region_type]], for example: "
+                          "'std.code.complexity:cyclomatic:7', or 'std.code.complexity:maxdepth:5:function'. "
+                          "Region types is optional specifier, and if not defined the limit is applied to regions of all types.")
+    
+    def configure(self, options):
+        self.hotspots = options.__dict__['hotspots']
+        self.no_suppress = options.__dict__['disable_suppressions']
+
+        if options.__dict__['warn_mode'] == 'new':
+            self.mode = self.MODE_NEW
+        elif options.__dict__['warn_mode'] == 'trend':
+            self.mode = self.MODE_TREND
+        elif options.__dict__['warn_mode'] == 'touched':
+            self.mode = self.MODE_TOUCHED
+        elif options.__dict__['warn_mode'] == 'all':
+            self.mode = self.MODE_ALL
+            
+        if self.mode != self.MODE_ALL and options.__dict__['db_file_prev'] == None:
+            self.parser.error("option --warn-mode: The mode '" + options.__dict__['warn_mode'] + "' requires '--db-file-prev' option set")
+
+        class Limit(object):
+            def __init__(self, limit_type, limit, namespace, field, db_filter, region_types, original):
+                self.type = limit_type
+                self.limit = limit
+                self.namespace = namespace
+                self.field = field
+                self.filter = db_filter
+                self.region_types = region_types
+                self.original = original
+                
+            def __repr__(self):
+                return "'{0}:{1}' {2} {3} [applied to '{4}' region type(s)]".format(
+                        self.namespace, self.field, self.filter[1], self.limit,
+                        mpp.api.Region.T().to_str(self.region_types))
+        
+        self.limits = []
+        pattern = re.compile(r'''([^:]+)[:]([^:]+)[:]([-+]?[0-9]+(?:[.][0-9]+)?)(?:[:](.+))?''')
+        if options.__dict__['max_limit'] != None:
+            for each in options.__dict__['max_limit']:
+                match = re.match(pattern, each)
+                if match == None:
+                    self.parser.error("option --max-limit: Invalid format: " + each)
+                region_types = 0x00
+                if match.group(4) != None:
+                    for region_type in match.group(4).split(','):
+                        region_type = region_type.strip()
+                        group_id = mpp.api.Region.T().from_str(region_type)
+                        if group_id == None:
+                            self.parser.error(
+                                    "option --max-limit: uknown region type (allowed: global, class, struct, namespace, function, interface, any): " + region_type)
+                        region_types |= group_id
+                else:
+                    region_types = mpp.api.Region.T().ANY
+                limit = Limit("max", float(match.group(3)), match.group(1), match.group(2),
+                        (match.group(2), '>', float(match.group(3))), region_types, each)
+                self.limits.append(limit)
+        if options.__dict__['min_limit'] != None:
+            for each in options.__dict__['min_limit']:  
+                match = re.match(pattern, each)
+                if match == None:
+                    self.parser.error("option --min-limit: Invalid format: " + each)
+                region_types = 0x00
+                if match.group(4) != None:
+                    for region_type in match.group(4).split(','):
+                        region_type = region_type.strip()
+                        group_id = mpp.api.Region.T().from_str(region_type)
+                        if group_id == None:
+                            self.parser.error(
+                                    "option --max-limit: uknown region type (allowed: global, class, struct, namespace, function, interface, any): " + region_type)
+                        region_types |= group_id
+                else:
+                    region_types = mpp.api.Region.T().ANY
+                limit = Limit("min", float(match.group(3)), match.group(1), match.group(2),
+                        (match.group(2), '<', float(match.group(3))), region_types, each)
+                self.limits.append(limit)
+
+    def initialize(self):
+        super(Plugin, self).initialize()
+        loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
+        loader = self.get_plugin('mpp.dbf').get_loader()
+
+        self._verify_namespaces(loader.iterate_namespace_names())
+        for each in loader.iterate_namespace_names():
+            self._verify_fields(each, loader.get_namespace(each).iterate_field_names())
+
+        # Try to optimise iterative change scans
+        self.modified_file_ids = None
+        if self.mode != self.MODE_ALL:
+            self.modified_file_ids = self._get_list_of_modified_files(loader, loader_prev)
+    
+    def _verify_namespaces(self, valid_namespaces):
+        valid = []
+        for each in valid_namespaces:
+            valid.append(each)
+        for each in self.limits:
+            if each.namespace not in valid:
+                self.parser.error("option --{0}-limit: metric '{1}:{2}' is not available in the database file.".
+                                  format(each.type, each.namespace, each.field))
+
+    def _verify_fields(self, namespace, valid_fields):
+        valid = []
+        for each in valid_fields:
+            valid.append(each)
+        for each in self.limits:
+            if each.namespace == namespace:
+                if each.field not in valid:
+                    self.parser.error("option --{0}-limit: metric '{1}:{2}' is not available in the database file.".
+                                      format(each.type, each.namespace, each.field))
+                            
+    def _get_list_of_modified_files(self, loader, loader_prev):
+        logging.info("Identifying changed files...")
+        
+        old_files_map = {}
+        for each in loader_prev.iterate_file_data():
+            old_files_map[each.get_path()] = each.get_checksum()
+        if len(old_files_map) == 0:
+            return None
+        
+        modified_file_ids = []
+        for each in loader.iterate_file_data():
+            if len(modified_file_ids) > 1000: # If more than 1000 files changed, skip optimisation
+                return None
+            if (each.get_path() not in list(old_files_map.keys())) or old_files_map[each.get_path()] != each.get_checksum():
+                modified_file_ids.append(str(each.get_id()))
+
+        old_files_map = None
+                
+        if len(modified_file_ids) != 0:
+            modified_file_ids = " , ".join(modified_file_ids)
+            modified_file_ids = "(" + modified_file_ids + ")"
+            return modified_file_ids
+        
+        return None
+
+    def _is_metric_suppressed(self, metric_namespace, metric_field, loader, select_data):
+        data = loader.load_file_data(select_data.get_path())
+        if select_data.get_region() != None:
+            data = data.get_region(select_data.get_region().get_id())
+            sup_data = data.get_data('std.suppress', 'list')
+        else:
+            sup_data = data.get_data('std.suppress.file', 'list')
+        if sup_data != None and sup_data.find('[' + metric_namespace + ':' + metric_field + ']') != -1:
+            return True
+        return False
+
+                    
+    def iterate_limits(self):
+        for each in self.limits:
+            yield each   
+
+    def is_mode_matched(self, limit, value, diff, is_modified):
+        if is_modified == None:
+            # means new region, True in all modes
+            return True
+        if self.mode == self.MODE_ALL:
+            return True 
+        if self.mode == self.MODE_TOUCHED and is_modified == True:
+            return True 
+        if self.mode == self.MODE_TREND and is_modified == True:
+            if limit < value and diff > 0:
+                return True
+            if limit > value and diff < 0:
+                return True
+        return False
+
+    def get_warnings(self, path, limit):
+
+        class Warning (object):
+        
+            def __init__(self, path, cursor, namespace, field, region_name,
+                            stat_level, trend_value, stat_limit,
+                            is_modified, is_suppressed):
+                self.path = path
+                self.cursor = cursor
+                self.namespace = namespace
+                self.field = field
+                self.region_name = region_name
+                self.stat_level = stat_level
+                self.trend_value = trend_value
+                self.stat_limit = stat_limit
+                self.is_modified = is_modified
+                self.is_suppressed = is_suppressed
+
+        loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
+        loader = self.get_plugin('mpp.dbf').get_loader()
+
+        warnings = []
+
+        filters = [limit.filter]
+        if self.modified_file_ids != None:
+            filters.append(('file_id', 'IN', self.modified_file_ids))
+        sort_by = None
+        limit_by = None
+        limit_warnings = None
+        if self.hotspots != None:
+            sort_by = limit.field
+            if limit.type == "max":
+                sort_by = "-" + sort_by
+            if self.mode == self.MODE_ALL:
+                # if it is not ALL mode, the tool counts number of printed warnings below
+                limit_by = self.hotspots
+            limit_warnings = self.hotspots
+        selected_data = loader.load_selected_data(limit.namespace,
+                                                fields = [limit.field],
+                                                path=path,
+                                                filters = filters,
+                                                sort_by=sort_by,
+                                                limit_by=limit_by)
+        if selected_data == None:
+            mpp.utils.report_bad_path(path)
+            return None
+        
+        for select_data in selected_data:
+            if limit_warnings != None and limit_warnings <= 0:
+                break
+            
+            is_modified = None
+            diff = None
+            file_data = loader.load_file_data(select_data.get_path())
+            file_data_prev = loader_prev.load_file_data(select_data.get_path())
+            if file_data_prev != None:
+                if file_data.get_checksum() == file_data_prev.get_checksum():
+                    diff = 0
+                    is_modified = False
+                else:
+                    matcher = mpp.utils.FileRegionsMatcher(file_data, file_data_prev)
+                    prev_id = matcher.get_prev_id(select_data.get_region().get_id())
+                    if matcher.is_matched(select_data.get_region().get_id()):
+                        if matcher.is_modified(select_data.get_region().get_id()):
+                            is_modified = True
+                        else:
+                            is_modified = False
+                        diff = mpp.api.DiffData(select_data,
+                                                        file_data_prev.get_region(prev_id)).get_data(limit.namespace, limit.field)
+
+            if (self.is_mode_matched(limit.limit,
+                                            select_data.get_data(limit.namespace, limit.field),
+                                            diff,
+                                            is_modified) == False):
+                continue
+            
+            is_sup = self._is_metric_suppressed(limit.namespace, limit.field, loader, select_data)
+            if is_sup == True and self.no_suppress == False:
+                continue    
+            
+            region_cursor = 0
+            region_name = None
+            if select_data.get_region() != None:
+                if select_data.get_region().get_type() & limit.region_types == 0:
+                    continue
+                region_cursor = select_data.get_region().cursor
+                region_name = select_data.get_region().name
+            warnings.append(Warning(select_data.get_path(),
+                                region_cursor,
+                                limit.namespace,
+                                limit.field,
+                                region_name,
+                                select_data.get_data(limit.namespace, limit.field),
+                                diff,
+                                limit.limit,
+                                is_modified,
+                                is_sup))
+            if limit_warnings != None:
+                    limit_warnings -= 1
+        
+        return warnings
+
+    def get_all_warnings(self, path):
+        """ returns all warnings from a specified path """
+
+        warnings = []
+            
+        for limit in self.iterate_limits():
+            warnings = warnings + self.get_warnings(path, limit)
+        
+        return warnings

+ 1 - 1
ext/std/tools/report.ini

@@ -10,6 +10,6 @@ version: 1.0
 package: std.tools
 module:  report
 class:   Plugin
-depends: mpp.dbf,std.tools.limit
+depends: mpp.dbf,std.tools.limit_backend
 actions: report
 enabled: True

+ 59 - 14
ext/std/tools/report.py

@@ -56,16 +56,22 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
         if output_dir:
             os.makedirs(output_dir, exist_ok=True)
             with open(os.path.join(output_dir, "metrixpp.dox"), mode="w+") as file:
-                file.write("/* this file is autogenerated by metrixpp - changes will be overwritten */\n")
+                file.write("/* this file is autogenerated by metrix++ - changes will be overwritten */\n")
                 file.write("/*!\n")
 
-                file.write("\\page metrics Metrics overview\n\n")
+                file.write("\\page metrix_overview Metrix overview\n\n")
+
+                file.write("\\section metrix_sec Metrix Warnings\n\n")
+                file.write("Metrix Limits exceeded {} times.\n\n".format(len(overview_data["warnings"])))
+
+                if len(overview_data["warnings"]) > 0:
+                    file.write("Warning list: \\ref metrix_warnings\n\n")
 
                 for file_data in overview_data["matrix"]:
                     file_data[0] = "\\ref " + file_data[0]
 
                 writer = pytablewriter.MarkdownTableWriter()
-                writer.table_name = "metrics overview"
+                writer.table_name = "metrix overview"
                 writer.headers = overview_data["fields"]
                 writer.value_matrix = overview_data["matrix"]
                 writer.margin = 1
@@ -79,7 +85,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
                     file.write("\\file {}\n\n".format(path))
                     
                     writer = pytablewriter.MarkdownTableWriter()
-                    writer.table_name = "metrics"
+                    writer.table_name = "metrix"
                     writer.headers = data[path]["file_fields"]
                     writer.value_matrix = data[path]["file_matrix"]
                     writer.margin = 1
@@ -93,13 +99,32 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
                             region[0] = "\\ref " + region[0]
 
                     writer = pytablewriter.MarkdownTableWriter()
-                    writer.table_name = "region metrics"
+                    writer.table_name = "region metrix"
                     writer.headers = data[path]["region_fields"]
                     writer.value_matrix = data[path]["region_matrix"]
                     writer.margin = 1
                     writer.stream = file
                     writer.write_table()
 
+                    file.write("\n")
+
+                    # add warnings as list items
+                    for warning in data[path]["warnings"]:
+                        warning_text = "Metric '" + warning.namespace + ":" + warning.field + "'"
+                        
+                        if warning.region_name and warning.region_name != "__global__":
+                            warning_text = warning_text + " for region \\ref " + warning.region_name
+                        elif warning.region_name == "__global__":
+                            warning_text = warning_text + " for region " + warning.region_name
+                        else:
+                            warning_text = warning_text + " for the file \\ref " + warning.path
+                        
+                        warning_text = warning_text + " exceeds the limit."
+                        warning_text = warning_text + " (value: {} - limit: {})".format(warning.stat_level, warning.stat_limit)
+                        
+                        file.write("\\xrefitem metrix_warnings \"Metrix Warning\" \"Metrix Warnings\" {}\n".format(warning_text))
+                        
+                    
                     file.write("\n\n")
 
                 file.write("*/\n")
@@ -114,9 +139,11 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
 
         data = {}
         overview_data = {}
+        warnings = []
 
         loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
         loader = self.get_plugin('mpp.dbf').get_loader()
+        limit_backend = self.get_plugin('std.tools.limit_backend')
 
         paths = None
         if len(args) == 0:
@@ -128,13 +155,20 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
             path = mpp.utils.preprocess_path(path)
             data[path] = {}
             data[path]["file_data"] = {}
-            data[path]["file_fields"] = []
+            data[path]["file_fields"] = ["warnings"]
             data[path]["file_matrix"] = [[]]
             data[path]["regions"] = {}
-            data[path]["region_fields"] = ["region"]
+            data[path]["region_fields"] = ["region", "warnings"]
             data[path]["region_matrix"] = []
+            data[path]["warnings"] = []
 
             file_data = loader.load_file_data(path)
+            
+            # get warnings from limit plugin
+            data[path]["warnings"] = limit_backend.get_all_warnings(path)
+            # convert paths to increase readability
+            for warning in data[path]["warnings"]:
+                warning.path = os.path.relpath(warning.path)
 
             # load file based data
             data_tree = file_data.get_data_tree()
@@ -144,7 +178,10 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
                     data[path]["file_fields"].append(namespace + "." +  field[0])
    
             for field in data[path]["file_fields"]:
-                data[path]["file_matrix"][0].append(data[path]["file_data"][field])
+                if field == "warnings":
+                    data[path]["file_matrix"][0].append(len(data[path]["warnings"]))
+                else:
+                    data[path]["file_matrix"][0].append(data[path]["file_data"][field])
 
             # load region based data
             file_data.load_regions()
@@ -158,10 +195,14 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
                         if not (namespace + "." +  field[0]) in data[path]["region_fields"]:
                             data[path]["region_fields"].append(namespace + "." +  field[0])
             
+            # iterate over all found regions in the file
             for region in data[path]["regions"]:
-                region_row = [region]
+                # add static columns with region name and warning count
+                warning_count = sum(warning.region_name == region for warning in data[path]["warnings"])
+                region_row = [region, str(warning_count)]
                 
-                for field in data[path]["region_fields"][1:]:
+                # start iterating after the static fields
+                for field in data[path]["region_fields"][2:]:
                     if field in data[path]["regions"][region]:
                         region_row.append(data[path]["regions"][region][field])
                     else:
@@ -169,7 +210,9 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
                 
                 data[path]["region_matrix"].append(region_row)
 
-            overview_data["fields"] = ["file"]
+            # assemble overview table
+            overview_data["warnings"] = []
+            overview_data["fields"] = ["file", "warnings"]
             overview_data["matrix"] = []
             for key, value in data.items():
                 for field in value["file_fields"]:
@@ -177,15 +220,17 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
                         overview_data["fields"].append(field)
             
             for key, value in data.items():
-                row = [os.path.relpath(key)]
-                for field in overview_data["fields"][1:]:
+                overview_data["warnings"] = overview_data["warnings"] + value["warnings"]
+                row = [os.path.relpath(key), len(value["warnings"])]
+                for field in overview_data["fields"][2:]:
                     if field in value["file_data"]:
                         row.append(value["file_data"][field])
                     else:
                         row.append("-")
                 
                 overview_data["matrix"].append(row)
-                
+        
+        
         if self.out_format == "doxygen":
             exit_code = self.create_doxygen_report(paths,
                                                    self.out_dir,