Browse Source

hotspots, distributions, improved info tool, file size metric.

avkonst 11 năm trước
mục cha
commit
375c99cef7
38 tập tin đã thay đổi với 397 bổ sung và 91 xóa
  1. 14 7
      mainline/core/db/loader.py
  2. 45 8
      mainline/core/db/sqlite.py
  3. 7 0
      mainline/core/dir.py
  4. 1 1
      mainline/core/export/utils/py2txt.py
  5. 1 1
      mainline/core/export/utils/py2xml.py
  6. 8 7
      mainline/ext/std/code/cpp.py
  7. 6 5
      mainline/ext/std/code/cs.py
  8. 5 4
      mainline/ext/std/code/java.py
  9. 0 7
      mainline/readme.txt
  10. 7 1
      mainline/tests/general/test_basic/test_export_format_export_nest_per_file_stdout.gold.txt
  11. 7 1
      mainline/tests/general/test_basic/test_export_format_export_nest_stdout.gold.txt
  12. 1 1
      mainline/tests/general/test_basic/test_export_format_export_python_stdout.gold.txt
  13. 10 1
      mainline/tests/general/test_basic/test_export_format_export_txt_stdout.gold.txt
  14. 6 1
      mainline/tests/general/test_basic/test_export_format_export_xml_stdout.gold.txt
  15. 1 1
      mainline/tests/general/test_basic/test_workflow_collect_default_stderr.gold.txt
  16. 1 1
      mainline/tests/general/test_basic/test_workflow_collect_second_stderr.gold.txt
  17. 1 1
      mainline/tests/general/test_basic/test_workflow_export_default_stderr.gold.txt
  18. 6 1
      mainline/tests/general/test_basic/test_workflow_export_default_stdout.gold.txt
  19. 1 1
      mainline/tests/general/test_basic/test_workflow_export_second_per_file_stderr.gold.txt
  20. 7 1
      mainline/tests/general/test_basic/test_workflow_export_second_per_file_stdout.gold.txt
  21. 1 1
      mainline/tests/general/test_basic/test_workflow_export_second_stderr.gold.txt
  22. 7 1
      mainline/tests/general/test_basic/test_workflow_export_second_stdout.gold.txt
  23. 1 1
      mainline/tests/general/test_basic/test_workflow_info_default_stderr.gold.txt
  24. 1 1
      mainline/tests/general/test_basic/test_workflow_limit_default_stderr.gold.txt
  25. 1 1
      mainline/tests/general/test_basic/test_workflow_limit_second_stderr.gold.txt
  26. 1 1
      mainline/tests/general/test_basic/test_workflow_limit_second_warn_all_stderr.gold.txt
  27. 1 1
      mainline/tests/general/test_basic/test_workflow_limit_second_warn_new_stderr.gold.txt
  28. 1 1
      mainline/tests/general/test_basic/test_workflow_limit_second_warn_touched_stderr.gold.txt
  29. 1 1
      mainline/tests/general/test_basic/test_workflow_limit_second_warn_trend_stderr.gold.txt
  30. 7 1
      mainline/tests/general/test_std_code_cpp/test_parser_export_default_stdout.gold.txt
  31. 62 5
      mainline/tests/general/test_std_code_cpp/test_parser_export_files_stdout.gold.txt
  32. 11 1
      mainline/tests/general/test_std_code_cs/test_parser_export_default_stdout.gold.txt
  33. 54 3
      mainline/tests/general/test_std_code_cs/test_parser_export_files_stdout.gold.txt
  34. 10 1
      mainline/tests/general/test_std_code_java/test_parser_export_default_stdout.gold.txt
  35. 68 4
      mainline/tests/general/test_std_code_java/test_parser_export_files_stdout.gold.txt
  36. 18 4
      mainline/tools/export.py
  37. 3 10
      mainline/tools/info.py
  38. 14 2
      mainline/tools/limit.py

+ 14 - 7
mainline/core/db/loader.py

@@ -865,11 +865,17 @@ class Loader(object):
                 if namespace.get_field_packager(field).is_non_zero() == True:
                     data[field]['min'] = None
                     data[field]['avg'] = None
+                distribution = self.db.count_rows(name, path_like = final_path_like, group_by_column = field)
+                data[field]['distribution-bars'] = []
+                for each in distribution:
+                    data[field]['distribution-bars'].append({'metric': each[0],
+                                                             'count': each[1],
+                                                             'ratio': round((float(each[1]) / float(data[field]['count'])), 4)})
                 result.set_data(name, field, data[field])
-        
         return result
     
-    def load_selected_data(self, namespace, fields = None, path = None, path_like_filter = "%", filters = []):
+    def load_selected_data(self, namespace, fields = None, path = None, path_like_filter = "%", filters = [],
+                           sort_by = None, limit_by = None):
         if self.db == None:
             return None
         
@@ -887,8 +893,9 @@ class Loader(object):
         
         class SelectDataIterator(object):
         
-            def iterate_selected_values(self, loader, namespace_obj, final_path_like, fields, filters):
-                for row in loader.db.select_rows(namespace_obj.get_name(), path_like=final_path_like, filters=filters):
+            def iterate_selected_values(self, loader, namespace_obj, final_path_like, fields, filters, sort_by, limit_by):
+                for row in loader.db.select_rows(namespace_obj.get_name(), path_like=final_path_like, filters=filters,
+                                                 order_by=sort_by, limit_by=limit_by):
                     region_id = None
                     if namespace_obj.are_regions_supported() == True:
                         region_id = row['region_id']
@@ -900,11 +907,11 @@ class Loader(object):
                         data.set_data(namespace, field, row[field])
                     yield data
             
-            def __init__(self, loader, namespace_obj, final_path_like, fields, filters):
-                self.iterator = self.iterate_selected_values(loader, namespace_obj, final_path_like, fields, filters)
+            def __init__(self, loader, namespace_obj, final_path_like, fields, filters, sort_by, limit_by):
+                self.iterator = self.iterate_selected_values(loader, namespace_obj, final_path_like, fields, filters, sort_by, limit_by)
     
             def __iter__(self):
                 return self.iterator
 
-        return SelectDataIterator(self, namespace_obj, final_path_like, fields, filters)
+        return SelectDataIterator(self, namespace_obj, final_path_like, fields, filters, sort_by, limit_by)
     

+ 45 - 8
mainline/core/db/sqlite.py

@@ -545,13 +545,16 @@ class Database(object):
         cur.execute(sql, column_data)
         return cur.lastrowid
 
-    def select_rows(self, table_name, path_like = None, column_names = [], filters = []):
+    def select_rows(self, table_name, path_like = None, column_names = [], filters = [], order_by = None, limit_by = None):
         safe_column_names = []
         for each in column_names:
             safe_column_names.append("'" + each + "'")
-        return self.select_rows_unsafe(table_name, path_like = path_like, column_names = safe_column_names, filters = filters)
+        return self.select_rows_unsafe(table_name, path_like = path_like,
+                                       column_names = safe_column_names, filters = filters,
+                                       order_by = order_by, limit_by = limit_by)
 
-    def select_rows_unsafe(self, table_name, path_like = None, column_names = [], filters = []):
+    def select_rows_unsafe(self, table_name, path_like = None, column_names = [], filters = [], 
+                           group_by = None, order_by = None, limit_by = None):
         path_like = self.InternalPathUtils().normalize_path(path_like)
         if self.conn == None:
             return []
@@ -561,7 +564,7 @@ class Database(object):
         what_stmt = ", ".join(column_names)
         if len(what_stmt) == 0:
             what_stmt = "*"
-        elif path_like != None and table_name != '__files__':
+        elif path_like != None and table_name != '__files__' and group_by == None:
             what_stmt += ", '__files__'.'path', '__files__'.'id'"
         inner_stmt = ""
         if path_like != None and table_name != '__files__':
@@ -585,11 +588,26 @@ class Database(object):
                 where_stmt += " AND '__files__'.'path' LIKE ?"
                 values += (path_like, )
             where_stmt += ")"
-        else:
+        elif path_like != None:
             where_stmt = " WHERE '__files__'.'path' LIKE ?"
             values += (path_like, )
+        
+        group_stmt = ""
+        if group_by != None:
+            group_stmt = " GROUP BY (`" + group_by + "`)"
+
+        order_stmt = ""
+        if order_by != None:
+            if order_by.startswith("-"):
+                order_stmt = " ORDER BY (`" + order_by[1:] + "`) DESC "
+            else:
+                order_stmt = " ORDER BY (`" + order_by + "`) "
 
-        sql = "SELECT " + what_stmt + " FROM " + table_stmt + inner_stmt + where_stmt
+        limit_stmt = ""
+        if limit_by != None:
+            limit_stmt = " LIMIT " + str(limit_by)
+
+        sql = "SELECT " + what_stmt + " FROM " + table_stmt + inner_stmt + where_stmt + group_stmt + order_stmt + limit_stmt
         self.log(sql + " /with arguments: " + str(values))
         return self.conn.execute(sql, values).fetchall()
 
@@ -621,7 +639,7 @@ class Database(object):
         
         total_column_names = []
         for column_name in column_names:
-            for func in ['max', 'min', 'avg', 'total']:
+            for func in ['max', 'min', 'avg', 'total', 'count']:
                 total_column_names.append(func + "('" + table_name + "'.'" + column_name + "') AS " + "'" + column_name + "_" + func + "'")
              
         data = self.select_rows_unsafe(table_name, path_like = path_like, column_names = total_column_names, filters = filters)
@@ -629,10 +647,29 @@ class Database(object):
         result = {}
         for column_name in column_names:
             result[column_name] = {}
-            for func in ['max', 'min', 'avg', 'total']:
+            for func in ['max', 'min', 'avg', 'total', 'count']:
                 result[column_name][func] = data[0][column_name + "_" + func]
         return result
     
+    def count_rows(self, table_name, path_like = None, group_by_column = None, filters = []):
+        
+        count_column_names = None
+        
+        if group_by_column != None:
+            for column in self.iterate_columns(table_name):
+                if group_by_column == column.name:
+                    count_column_names = ["`" + group_by_column + "`", "COUNT(`" + group_by_column + "`)"]
+                    break
+        else:
+            count_column_names = ["COUNT(*)"]
+            
+        if count_column_names == None:
+            return []
+             
+        data = self.select_rows_unsafe(table_name, path_like = path_like, column_names = count_column_names,
+                                       filters = filters, group_by = group_by_column)
+        return data
+
     def log(self, sql):
         #import traceback
         #traceback.print_stack()

+ 7 - 0
mainline/core/dir.py

@@ -40,12 +40,15 @@ class Plugin(core.api.Plugin, core.api.Parent, core.api.IConfigurable, core.api.
                          help="If the option is set (True), the tool measures processing time per file [default: %default]")
         parser.add_option("--general.procerrors-on", action="store_true", default=False,
                          help="If the option is set (True), the tool counts number of processing/parsing errors per file [default: %default]")
+        parser.add_option("--general.size-on", action="store_true", default=False,
+                         help="If the option is set (True), the tool collects file size metric (in bytes) [default: %default]")
     
     def configure(self, options):
         self.non_recursively = options.__dict__['general.non_recursively']
         self.add_exclude_rule(re.compile(options.__dict__['general.exclude_files']))
         self.is_proctime_enabled = options.__dict__['general.proctime_on']
         self.is_procerrors_enabled = options.__dict__['general.procerrors_on']
+        self.is_size_enabled = options.__dict__['general.size_on']
 
     def initialize(self):
         namespace = self.get_plugin_loader().get_database_loader().create_namespace('general')
@@ -53,6 +56,8 @@ class Plugin(core.api.Plugin, core.api.Parent, core.api.IConfigurable, core.api.
             namespace.add_field('proctime', float)
         if self.is_procerrors_enabled == True:
             namespace.add_field('procerrors', int)
+        if self.is_size_enabled == True:
+            namespace.add_field('size', int)
         
     def run(self, args):
         if len(args) == 0:
@@ -102,6 +107,8 @@ class DirectoryReader():
                                 data.set_data('general', 'proctime', time.time() - ts)
                             if plugin.is_procerrors_enabled == True and procerrors != None and procerrors != 0:
                                 data.set_data('general', 'procerrors', procerrors)
+                            if plugin.is_size_enabled == True:
+                                data.set_data('general', 'size', len(text))
                             plugin.get_plugin_loader().get_database_loader().save_file_data(data)
                             logging.debug("-" * 60)
                             exit_code += procerrors

+ 1 - 1
mainline/core/export/utils/py2txt.py

@@ -128,7 +128,7 @@ class Py2TXT():
             else:
                 childStr += "\n" + (INDENT_SPACE_SYMBOL * (indent + 1))
                 for string in childObj:
-                    childStr += string;
+                    childStr += str(string);
 
         if objName == None:
             return childStr

+ 1 - 1
mainline/core/export/utils/py2xml.py

@@ -130,7 +130,7 @@ class Py2XML():
             else:
                 childStr += "\n" + (INDENT_SPACE_SYMBOL * (indent + 1)) + "<" + objName[:-1] + ">"
                 for string in childObj:
-                    childStr += string;
+                    childStr += str(string);
                 childStr += "</" + objName[:-1] + ">"
                 
         if objName == None:

+ 8 - 7
mainline/ext/std/code/cpp.py

@@ -55,16 +55,16 @@ class Plugin(core.api.Plugin, core.api.Parent, core.api.IParser, core.api.IConfi
 class CppCodeParser(object):
     
     regex_cpp = re.compile(r'''
-                   /([\\](?:\n|\r|\r\n))*/(?=\n|\r|\r\n)              # Match C++ style comments (empty comment line)
-                |  /([\\](?:\n|\r|\r\n))*/.*?[^\\](?=\n|\r|\r\n)      # Match C++ style comments
+                   /([\\](?:\n|\r\n|\r))*/(?=\n|\r\n|\r)              # Match C++ style comments (empty comment line)
+                |  /([\\](?:\n|\r\n|\r))*/.*?[^\\](?=\n|\r\n|\r)      # Match C++ style comments
                                                                       # NOTE: end of line is NOT consumed
-                                                                      # NOTE: ([\\](?:\n|\r|\r\n))* for new line separators,
+                                                                      # NOTE: ([\\](?:\n|\r\n|\r))* for new line separators,
                                                                       # Need to support new line separators in expense of efficiency?
                 | /\*\*/                                              # Match C style comments (empty comment line)
-                | /([\\](?:\n|\r|\r\n))*\*.*?\*([\\](?:\n|\r|\r\n))*/ # Match C style comments
+                | /([\\](?:\n|\r\n|\r))*\*.*?\*([\\](?:\n|\r\n|\r))*/ # Match C style comments
                 | \'(?:\\.|[^\\\'])*\'                                # Match quoted strings
                 | "(?:\\.|[^\\"])*"                                   # Match double quoted strings
-                | (((?<=\n|\r)|^)[ \t]*[#].*?[^\\](?=\n|\r|\r\n))     # Match preprocessor
+                | (((?<=\n|\r)|^)[ \t]*[#].*?[^\\](?=\n|\r\n|\r))     # Match preprocessor
                                                                       # NOTE: end of line is NOT consumed
                                                                       # NOTE: beginning of line is NOT consumed
                 | (?P<fn_name>
@@ -84,12 +84,13 @@ class CppCodeParser(object):
                                                                       # LIMITATION: if there are comments between keyword and name,
                                                                       # it is not detected
                 | [<>{};:]                                            # Match block start/end, brackets and statement separator
-                | ((?:\n|\r|\r\n)\s*(?:\n|\r|\r\n))                   # Match double empty line
+                | ((?:\n|\r\n|\r)\s*(?:\n|\r\n|\r))                   # Match double empty line
             ''',
             re.DOTALL | re.MULTILINE | re.VERBOSE
         )
     
-    regex_ln = re.compile(r'(\n)|(\r)|(\r\n)')
+    # \r\n goes before \r in order to consume right number of lines on Unix for Windows files
+    regex_ln = re.compile(r'(\n)|(\r\n)|(\r)')
 
     def run(self, data):
         self.__init__() # Go to initial state if it is called twice

+ 6 - 5
mainline/ext/std/code/cs.py

@@ -55,8 +55,8 @@ class Plugin(core.api.Plugin, core.api.Parent, core.api.IParser, core.api.IConfi
 class CsCodeParser(object):
     
     regex_cpp = re.compile(r'''
-                   //(?=\n|\r|\r\n)                                   # Match C# style comments (empty comment line)
-                |  //.*?(?=\n|\r|\r\n)                                # Match C# style comments
+                   //(?=\n|\r\n|\r)                                   # Match C# style comments (empty comment line)
+                |  //.*?(?=\n|\r\n|\r)                                # Match C# style comments
                                                                       # NOTE: end of line is NOT consumed
                                                                       # NOTE: it is slightly different in C++
                 | /\*\*/                                              # Match C style comments (empty comment line)
@@ -65,7 +65,7 @@ class CsCodeParser(object):
                                                                       # NOTE: it is slightly different in C++
                 | \'(?:\\.|[^\\\'])*\'                                # Match quoted strings
                 | "(?:\\.|[^\\"])*"                                   # Match double quoted strings
-                | (((?<=\n|\r)|^)[ \t]*[#].*?(?=\n|\r|\r\n))          # Match preprocessor
+                | (((?<=\n|\r)|^)[ \t]*[#].*?(?=\n|\r\n|\r))          # Match preprocessor
                                                                       # NOTE: end of line is NOT consumed
                                                                       # NOTE: beginning of line is NOT consumed
                                                                       # NOTE: C# does not support backslashing as C++ does
@@ -98,12 +98,13 @@ class CsCodeParser(object):
                 | [\[\]{};]                                               # Match block start/end and statement separator
                                                                       # NOTE: C++ parser includes processing of <> and : 
                                                                       #       to handle template definitions, it is easier in C#
-                | ((?:\n|\r|\r\n)\s*(?:\n|\r|\r\n))                   # Match double empty line
+                | ((?:\n|\r\n|\r)\s*(?:\n|\r\n|\r))                   # Match double empty line
             ''',
             re.DOTALL | re.MULTILINE | re.VERBOSE
         )
 
-    regex_ln = re.compile(r'(\n)|(\r)|(\r\n)')
+    # \r\n goes before \r in order to consume right number of lines on Unix for Windows files
+    regex_ln = re.compile(r'(\n)|(\r\n)|(\r)')
 
     def run(self, data):
         self.__init__() # Go to initial state if it is called twice

+ 5 - 4
mainline/ext/std/code/java.py

@@ -55,8 +55,8 @@ class Plugin(core.api.Plugin, core.api.Parent, core.api.IParser, core.api.IConfi
 class JavaCodeParser(object):
     
     regex_cpp = re.compile(r'''
-                   //(?=\n|\r|\r\n)                                   # Match Java style comments (empty comment line)
-                |  //.*?(?=\n|\r|\r\n)                                # Match Java style comments
+                   //(?=\n|\r\n|\r)                                   # Match Java style comments (empty comment line)
+                |  //.*?(?=\n|\r\n|\r)                                # Match Java style comments
                                                                       # NOTE: end of line is NOT consumed
                                                                       # NOTE: it is slightly different in C++
                 | /\*\*/                                              # Match C style comments (empty comment line)
@@ -78,12 +78,13 @@ class JavaCodeParser(object):
                 | [{};]                                               # Match block start/end and statement separator
                                                                       # NOTE: C++ parser includes processing of <> and : 
                                                                       #       to handle template definitions, it is easier in Java
-                | ((?:\n|\r|\r\n)\s*(?:\n|\r|\r\n))                   # Match double empty line
+                | ((?:\n|\r\n|\r)\s*(?:\n|\r\n|\r))                   # Match double empty line
             ''',
             re.DOTALL | re.MULTILINE | re.VERBOSE
         )
 
-    regex_ln = re.compile(r'(\n)|(\r)|(\r\n)')
+    # \r\n goes before \r in order to consume right number of lines on Unix for Windows files
+    regex_ln = re.compile(r'(\n)|(\r\n)|(\r)')
 
     def run(self, data):
         self.__init__() # Go to initial state if it is called twice

+ 0 - 7
mainline/readme.txt

@@ -30,9 +30,6 @@ WARNING: Metrix++ Project does not use svn repository,
      - metrics counters,
      - post processing tools.
 
-    Check change log for the information about new features and defect fixes:
-     - http://metrixplusplus.svn.sourceforge.net/viewvc/metrixplusplus/mainline/changelog.txt
-
     Check projects documentation for additional information:
      - http://metrixplusplus.sourceforge.net
      - ./doc folder within the distributive
@@ -40,10 +37,6 @@ WARNING: Metrix++ Project does not use svn repository,
     Bug reporting, feature requests and feedback:
      - e-mail to project administrator: avkonst@users.sourceforge.net
 
-    Follow the Project / Subscribe to Updates:
-     - Click 'Subscribe to Updates' button on sourceforge:
-       https://sourceforge.net/projects/metrixplusplus/
-
     ---
     Thank you for using the tool!
 

+ 7 - 1
mainline/tests/general/test_basic/test_export_format_export_nest_per_file_stdout.gold.txt

@@ -92,7 +92,13 @@
         </subdirs>
         <aggregated-data>
             <std.code.complexity>
-                <cyclomatic max="3" total="8.0" avg="1.33333333333" min="0">
+                <cyclomatic total="8.0" max="3" avg="1.33333333333" min="0">
+                    <distribution-bars>
+                        <distribution-bar count="1" __diff__="0" metric="0" />
+                        <distribution-bar count="3" __diff__="0" metric="1" />
+                        <distribution-bar count="1" __diff__="0" metric="2" />
+                        <distribution-bar count="1" __diff__="0" metric="3" />
+                    </distribution-bars>
                     <__diff__ max="0" total="2.0" avg="-0.166666666667" min="-1" />
                 </cyclomatic>
             </std.code.complexity>

+ 7 - 1
mainline/tests/general/test_basic/test_export_format_export_nest_stdout.gold.txt

@@ -10,7 +10,13 @@
         </subdirs>
         <aggregated-data>
             <std.code.complexity>
-                <cyclomatic max="3" total="11.0" avg="1.57142857143" min="0">
+                <cyclomatic total="11.0" max="3" avg="1.57142857143" min="0">
+                    <distribution-bars>
+                        <distribution-bar count="1" __diff__="0" metric="0" />
+                        <distribution-bar count="3" __diff__="0" metric="1" />
+                        <distribution-bar count="1" __diff__="0" metric="2" />
+                        <distribution-bar count="2" __diff__="1" metric="3" />
+                    </distribution-bars>
                     <__diff__ max="0" total="5.0" avg="0.0714285714286" min="-1" />
                 </cyclomatic>
             </std.code.complexity>

+ 1 - 1
mainline/tests/general/test_basic/test_export_format_export_python_stdout.gold.txt

@@ -1 +1 @@
-{'export': [{'data: {'info': {'path': '', 'id': 1}, 'file-data': {}, 'subfiles': [], 'subdirs': [u'.'], 'aggregated-data': {'std.code.complexity': {'cyclomatic': {'total': 6.0, 'max': 3, 'avg': 1.5, 'min': 1}}}}}]}
+{'export': [{'data: {'info': {'path': '', 'id': 1}, 'file-data': {}, 'subfiles': [], 'subdirs': [u'.'], 'aggregated-data': {'std.code.complexity': {'cyclomatic': {'total': 6.0, 'max': 3, 'avg': 1.5, 'distribution-bars': [{'count': 3, 'metric': 1}, {'count': 1, 'metric': 3}], 'min': 1}}}}}]}

+ 10 - 1
mainline/tests/general/test_basic/test_export_format_export_txt_stdout.gold.txt

@@ -17,7 +17,16 @@ data:
 .   .   .   .   max="3"
 .   .   .   .   total="6.0"
 .   .   .   .   avg="1.5"
-.   .   .   .   min="1"
+.   .   .   .   min="1" 
+.   .   .   .   distribution-bars:
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="3"
+.   .   .   .   .   .   metric="1"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="3"
 ================================================================================
 
 

+ 6 - 1
mainline/tests/general/test_basic/test_export_format_export_xml_stdout.gold.txt

@@ -10,7 +10,12 @@
         </subdirs>
         <aggregated-data>
             <std.code.complexity>
-                <cyclomatic max="3" total="6.0" avg="1.5" min="1" />
+                <cyclomatic max="3" total="6.0" avg="1.5" min="1">
+                    <distribution-bars>
+                        <distribution-bar count="3" metric="1" />
+                        <distribution-bar count="1" metric="3" />
+                    </distribution-bars>
+                </cyclomatic>
             </std.code.complexity>
         </aggregated-data>
     </data>

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_collect_default_stderr.gold.txt

@@ -2,4 +2,4 @@
 [LOG]: INFO:	Excluding: ./.unused.cpp
 [LOG]: INFO:	Skipping: ./dummy.txt
 [LOG]: INFO:	Processing: ./simple.cpp
-[LOG]: WARNING:	Exit code: 0. Time spent: 2.4 seconds. Done
+[LOG]: WARNING:	Exit code: 0. Time spent: 2.47 seconds. Done

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_collect_second_stderr.gold.txt

@@ -2,4 +2,4 @@
 [LOG]: INFO:	Excluding: ./.unused.cpp
 [LOG]: INFO:	Processing: ./simple.cpp
 [LOG]: INFO:	Processing: ./simple2.cpp
-[LOG]: WARNING:	Exit code: 0. Time spent: 0.29 seconds. Done
+[LOG]: WARNING:	Exit code: 0. Time spent: 0.26 seconds. Done

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_export_default_stderr.gold.txt

@@ -1,3 +1,3 @@
 [LOG]: WARNING:	Logging enabled with INFO level
 [LOG]: INFO:	Processing: 
-[LOG]: WARNING:	Exit code: 0. Time spent: 0.14 seconds. Done
+[LOG]: WARNING:	Exit code: 0. Time spent: 0.11 seconds. Done

+ 6 - 1
mainline/tests/general/test_basic/test_workflow_export_default_stdout.gold.txt

@@ -10,7 +10,12 @@
         </subdirs>
         <aggregated-data>
             <std.code.complexity>
-                <cyclomatic max="3" total="6.0" avg="1.5" min="1" />
+                <cyclomatic max="3" total="6.0" avg="1.5" min="1">
+                    <distribution-bars>
+                        <distribution-bar count="3" metric="1" />
+                        <distribution-bar count="1" metric="3" />
+                    </distribution-bars>
+                </cyclomatic>
             </std.code.complexity>
         </aggregated-data>
     </data>

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_export_second_per_file_stderr.gold.txt

@@ -1,3 +1,3 @@
 [LOG]: WARNING:	Logging enabled with INFO level
 [LOG]: INFO:	Processing: ./simple.cpp
-[LOG]: WARNING:	Exit code: 0. Time spent: 0.15 seconds. Done
+[LOG]: WARNING:	Exit code: 0. Time spent: 0.14 seconds. Done

+ 7 - 1
mainline/tests/general/test_basic/test_workflow_export_second_per_file_stdout.gold.txt

@@ -72,7 +72,13 @@
         </subdirs>
         <aggregated-data>
             <std.code.complexity>
-                <cyclomatic max="3" total="8.0" avg="1.33333333333" min="0">
+                <cyclomatic total="8.0" max="3" avg="1.33333333333" min="0">
+                    <distribution-bars>
+                        <distribution-bar count="1" __diff__="0" metric="0" />
+                        <distribution-bar count="3" __diff__="0" metric="1" />
+                        <distribution-bar count="1" __diff__="0" metric="2" />
+                        <distribution-bar count="1" __diff__="0" metric="3" />
+                    </distribution-bars>
                     <__diff__ max="0" total="2.0" avg="-0.166666666667" min="-1" />
                 </cyclomatic>
             </std.code.complexity>

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_export_second_stderr.gold.txt

@@ -1,3 +1,3 @@
 [LOG]: WARNING:	Logging enabled with INFO level
 [LOG]: INFO:	Processing: 
-[LOG]: WARNING:	Exit code: 0. Time spent: 0.15 seconds. Done
+[LOG]: WARNING:	Exit code: 0. Time spent: 0.12 seconds. Done

+ 7 - 1
mainline/tests/general/test_basic/test_workflow_export_second_stdout.gold.txt

@@ -10,7 +10,13 @@
         </subdirs>
         <aggregated-data>
             <std.code.complexity>
-                <cyclomatic max="3" total="11.0" avg="1.57142857143" min="0">
+                <cyclomatic total="11.0" max="3" avg="1.57142857143" min="0">
+                    <distribution-bars>
+                        <distribution-bar count="1" __diff__="0" metric="0" />
+                        <distribution-bar count="3" __diff__="0" metric="1" />
+                        <distribution-bar count="1" __diff__="0" metric="2" />
+                        <distribution-bar count="2" __diff__="1" metric="3" />
+                    </distribution-bars>
                     <__diff__ max="0" total="5.0" avg="0.0714285714286" min="-1" />
                 </cyclomatic>
             </std.code.complexity>

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_info_default_stderr.gold.txt

@@ -1,2 +1,2 @@
 [LOG]: WARNING:	Logging enabled with INFO level
-[LOG]: WARNING:	Exit code: 0. Time spent: 0.1 seconds. Done
+[LOG]: WARNING:	Exit code: 0. Time spent: 0.09 seconds. Done

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_limit_default_stderr.gold.txt

@@ -1,4 +1,4 @@
 [LOG]: WARNING:	Logging enabled with INFO level
 [LOG]: INFO:	Processing: 
 [LOG]: INFO:	Applying limit: namespace 'std.code.complexity', filter '('cyclomatic', '>', 0.0)'
-[LOG]: WARNING:	Exit code: 4. Time spent: 0.14 seconds. Done
+[LOG]: WARNING:	Exit code: 4. Time spent: 0.11 seconds. Done

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_limit_second_stderr.gold.txt

@@ -1,4 +1,4 @@
 [LOG]: WARNING:	Logging enabled with INFO level
 [LOG]: INFO:	Processing: 
 [LOG]: INFO:	Applying limit: namespace 'std.code.complexity', filter '('cyclomatic', '>', 0.0)'
-[LOG]: WARNING:	Exit code: 6. Time spent: 0.14 seconds. Done
+[LOG]: WARNING:	Exit code: 6. Time spent: 0.13 seconds. Done

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_limit_second_warn_all_stderr.gold.txt

@@ -1,4 +1,4 @@
 [LOG]: WARNING:	Logging enabled with INFO level
 [LOG]: INFO:	Processing: 
 [LOG]: INFO:	Applying limit: namespace 'std.code.complexity', filter '('cyclomatic', '>', 0.0)'
-[LOG]: WARNING:	Exit code: 6. Time spent: 0.14 seconds. Done
+[LOG]: WARNING:	Exit code: 6. Time spent: 0.12 seconds. Done

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_limit_second_warn_new_stderr.gold.txt

@@ -2,4 +2,4 @@
 [LOG]: INFO:	Identifying changed files...
 [LOG]: INFO:	Processing: 
 [LOG]: INFO:	Applying limit: namespace 'std.code.complexity', filter '('cyclomatic', '>', 0.0)'
-[LOG]: WARNING:	Exit code: 2. Time spent: 0.15 seconds. Done
+[LOG]: WARNING:	Exit code: 2. Time spent: 0.13 seconds. Done

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_limit_second_warn_touched_stderr.gold.txt

@@ -2,4 +2,4 @@
 [LOG]: INFO:	Identifying changed files...
 [LOG]: INFO:	Processing: 
 [LOG]: INFO:	Applying limit: namespace 'std.code.complexity', filter '('cyclomatic', '>', 0.0)'
-[LOG]: WARNING:	Exit code: 4. Time spent: 0.14 seconds. Done
+[LOG]: WARNING:	Exit code: 4. Time spent: 0.12 seconds. Done

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_limit_second_warn_trend_stderr.gold.txt

@@ -2,4 +2,4 @@
 [LOG]: INFO:	Identifying changed files...
 [LOG]: INFO:	Processing: 
 [LOG]: INFO:	Applying limit: namespace 'std.code.complexity', filter '('cyclomatic', '>', 0.0)'
-[LOG]: WARNING:	Exit code: 3. Time spent: 0.14 seconds. Done
+[LOG]: WARNING:	Exit code: 3. Time spent: 0.12 seconds. Done

+ 7 - 1
mainline/tests/general/test_std_code_cpp/test_parser_export_default_stdout.gold.txt

@@ -10,7 +10,13 @@
         </subdirs>
         <aggregated-data>
             <std.code.complexity>
-                <cyclomatic max="2" total="17.0" avg="0.197674418605" min="0" />
+                <cyclomatic max="2" total="17.0" avg="0.197674418605" min="0">
+                    <distribution-bars>
+                        <distribution-bar count="74" metric="0" />
+                        <distribution-bar count="7" metric="1" />
+                        <distribution-bar count="5" metric="2" />
+                    </distribution-bars>
+                </cyclomatic>
             </std.code.complexity>
         </aggregated-data>
     </data>

+ 62 - 5
mainline/tests/general/test_std_code_cpp/test_parser_export_files_stdout.gold.txt

@@ -89,7 +89,12 @@ data:
 .   .   .   .   max="0"
 .   .   .   .   total="0.0"
 .   .   .   .   avg="0.0"
-.   .   .   .   min="0"
+.   .   .   .   min="0" 
+.   .   .   .   distribution-bars:
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="3"
+.   .   .   .   .   .   metric="0"
 ================================================================================
 --------------------------------------------------------------------------------
 data:  
@@ -802,7 +807,20 @@ data:
 .   .   .   .   max="2"
 .   .   .   .   total="4.0"
 .   .   .   .   avg="0.078431372549"
-.   .   .   .   min="0"
+.   .   .   .   min="0" 
+.   .   .   .   distribution-bars:
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="48"
+.   .   .   .   .   .   metric="0"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="2"
+.   .   .   .   .   .   metric="1"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="2"
 ================================================================================
 --------------------------------------------------------------------------------
 data:  
@@ -1208,7 +1226,20 @@ data:
 .   .   .   .   max="2"
 .   .   .   .   total="6.0"
 .   .   .   .   avg="0.25"
-.   .   .   .   min="0"
+.   .   .   .   min="0" 
+.   .   .   .   distribution-bars:
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="20"
+.   .   .   .   .   .   metric="0"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="2"
+.   .   .   .   .   .   metric="1"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="2"
+.   .   .   .   .   .   metric="2"
 ================================================================================
 --------------------------------------------------------------------------------
 data:  
@@ -1310,7 +1341,20 @@ data:
 .   .   .   .   max="2"
 .   .   .   .   total="3.0"
 .   .   .   .   avg="0.75"
-.   .   .   .   min="0"
+.   .   .   .   min="0" 
+.   .   .   .   distribution-bars:
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="2"
+.   .   .   .   .   .   metric="0"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="1"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="2"
 ================================================================================
 --------------------------------------------------------------------------------
 data:  
@@ -1434,7 +1478,20 @@ data:
 .   .   .   .   max="2"
 .   .   .   .   total="4.0"
 .   .   .   .   avg="1.0"
-.   .   .   .   min="0"
+.   .   .   .   min="0" 
+.   .   .   .   distribution-bars:
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="0"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="2"
+.   .   .   .   .   .   metric="1"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="2"
 ================================================================================
 
 

+ 11 - 1
mainline/tests/general/test_std_code_cs/test_parser_export_default_stdout.gold.txt

@@ -10,7 +10,17 @@
         </subdirs>
         <aggregated-data>
             <std.code.complexity>
-                <cyclomatic max="6" total="45.0" avg="1.15384615385" min="0" />
+                <cyclomatic max="6" total="45.0" avg="1.15384615385" min="0">
+                    <distribution-bars>
+                        <distribution-bar count="23" metric="0" />
+                        <distribution-bar count="7" metric="1" />
+                        <distribution-bar count="1" metric="2" />
+                        <distribution-bar count="1" metric="3" />
+                        <distribution-bar count="3" metric="4" />
+                        <distribution-bar count="3" metric="5" />
+                        <distribution-bar count="1" metric="6" />
+                    </distribution-bars>
+                </cyclomatic>
             </std.code.complexity>
         </aggregated-data>
     </data>

+ 54 - 3
mainline/tests/general/test_std_code_cs/test_parser_export_files_stdout.gold.txt

@@ -331,7 +331,36 @@ data:
 .   .   .   .   max="6"
 .   .   .   .   total="39.0"
 .   .   .   .   avg="2.05263157895"
-.   .   .   .   min="0"
+.   .   .   .   min="0" 
+.   .   .   .   distribution-bars:
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="6"
+.   .   .   .   .   .   metric="0"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="5"
+.   .   .   .   .   .   metric="1"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="2"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="3"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="2"
+.   .   .   .   .   .   metric="4"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="3"
+.   .   .   .   .   .   metric="5"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="6"
 ================================================================================
 --------------------------------------------------------------------------------
 data:  
@@ -544,7 +573,16 @@ data:
 .   .   .   .   max="1"
 .   .   .   .   total="1.0"
 .   .   .   .   avg="0.125"
-.   .   .   .   min="0"
+.   .   .   .   min="0" 
+.   .   .   .   distribution-bars:
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="7"
+.   .   .   .   .   .   metric="0"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="1"
 ================================================================================
 --------------------------------------------------------------------------------
 data:  
@@ -789,7 +827,20 @@ data:
 .   .   .   .   max="4"
 .   .   .   .   total="5.0"
 .   .   .   .   avg="0.416666666667"
-.   .   .   .   min="0"
+.   .   .   .   min="0" 
+.   .   .   .   distribution-bars:
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="10"
+.   .   .   .   .   .   metric="0"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="1"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="4"
 ================================================================================
 
 

+ 10 - 1
mainline/tests/general/test_std_code_java/test_parser_export_default_stdout.gold.txt

@@ -10,7 +10,16 @@
         </subdirs>
         <aggregated-data>
             <std.code.complexity>
-                <cyclomatic max="6" total="126.0" avg="1.05882352941" min="0" />
+                <cyclomatic max="6" total="126.0" avg="1.05882352941" min="0">
+                    <distribution-bars>
+                        <distribution-bar count="58" metric="0" />
+                        <distribution-bar count="31" metric="1" />
+                        <distribution-bar count="15" metric="2" />
+                        <distribution-bar count="8" metric="3" />
+                        <distribution-bar count="1" metric="5" />
+                        <distribution-bar count="6" metric="6" />
+                    </distribution-bars>
+                </cyclomatic>
             </std.code.complexity>
         </aggregated-data>
     </data>

+ 68 - 4
mainline/tests/general/test_std_code_java/test_parser_export_files_stdout.gold.txt

@@ -655,7 +655,28 @@ data:
 .   .   .   .   max="6"
 .   .   .   .   total="83.0"
 .   .   .   .   avg="1.93023255814"
-.   .   .   .   min="0"
+.   .   .   .   min="0" 
+.   .   .   .   distribution-bars:
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="8"
+.   .   .   .   .   .   metric="0"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="14"
+.   .   .   .   .   .   metric="1"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="9"
+.   .   .   .   .   .   metric="2"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="7"
+.   .   .   .   .   .   metric="3"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="5"
+.   .   .   .   .   .   metric="6"
 ================================================================================
 --------------------------------------------------------------------------------
 data:  
@@ -712,7 +733,12 @@ data:
 .   .   .   .   max="0"
 .   .   .   .   total="0.0"
 .   .   .   .   avg="0.0"
-.   .   .   .   min="0"
+.   .   .   .   min="0" 
+.   .   .   .   distribution-bars:
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="0"
 ================================================================================
 --------------------------------------------------------------------------------
 data:  
@@ -1381,7 +1407,32 @@ data:
 .   .   .   .   max="6"
 .   .   .   .   total="37.0"
 .   .   .   .   avg="0.840909090909"
-.   .   .   .   min="0"
+.   .   .   .   min="0" 
+.   .   .   .   distribution-bars:
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="23"
+.   .   .   .   .   .   metric="0"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="13"
+.   .   .   .   .   .   metric="1"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="5"
+.   .   .   .   .   .   metric="2"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="3"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="5"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="6"
 ================================================================================
 --------------------------------------------------------------------------------
 data:  
@@ -1868,7 +1919,20 @@ data:
 .   .   .   .   max="2"
 .   .   .   .   total="6.0"
 .   .   .   .   avg="0.193548387097"
-.   .   .   .   min="0"
+.   .   .   .   min="0" 
+.   .   .   .   distribution-bars:
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="26"
+.   .   .   .   .   .   metric="0"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="4"
+.   .   .   .   .   .   metric="1"
+.   .   .   .   
+.   .   .   .   .   distribution-bar: 
+.   .   .   .   .   .   count="1"
+.   .   .   .   .   .   metric="2"
 ================================================================================
 
 

+ 18 - 4
mainline/tools/export.py

@@ -217,7 +217,10 @@ def append_diff(main_tree, prev_tree):
                         main_val = 0
                     if prev_val == None:
                         prev_val = 0
-                    diff[key] = main_val - prev_val
+                    if isinstance(main_val, list) and isinstance(prev_val, list):
+                        main_tree[name][field][key] = append_diff_list(main_val, prev_val)
+                    else:
+                        diff[key] = main_val - prev_val
                 main_tree[name][field]['__diff__'] = diff
             elif (not isinstance(main_tree[name][field], dict)) and (not isinstance(prev_tree[name][field], dict)):
                 if '__diff__' not in main_tree[name]:
@@ -225,6 +228,17 @@ def append_diff(main_tree, prev_tree):
                 main_tree[name]['__diff__'][field] = main_tree[name][field] - prev_tree[name][field]
     return main_tree
 
-    
-    
-  
+def append_diff_list(main_list, prev_list):
+    merged_list = {}
+    for bar in main_list:
+        merged_list[bar['metric']] = {'count': bar['count'], '__diff__':0}
+    for bar in prev_list:
+        if bar['metric'] in merged_list.keys():
+            merged_list[bar['metric']]['__diff__'] = \
+                merged_list[bar['metric']]['count'] - bar['count']
+        else:
+            merged_list[bar['metric']] = {'count': 0, '__diff__':-bar['count']}
+    result = []
+    for metric in sorted(merged_list.keys()):
+        result.append({'metric':metric, 'count':merged_list[metric]['count'], '__diff__':merged_list[metric]['__diff__']})
+    return result

+ 3 - 10
mainline/tools/info.py

@@ -63,15 +63,8 @@ def main(tool_args):
                 print "(!)",
         print "\t" + each.name + "\t=>\t" + each.value + prev_value_str
 
-    print "Namespaces:"
+    print "\nMetrics:"
     for each in loader.iterate_namespace_names():
-        prev_value_str = ""
-        if loader_prev != None:
-            prev = loader_prev.get_namespace(each)
-            if prev == None:
-                prev_value_str = " [new]"
-                print "(!)",
-        print "\t" + each + prev_value_str
         for field in loader.get_namespace(each).iterate_field_names():
             prev_value_str = ""
             if loader_prev != None:
@@ -79,9 +72,9 @@ def main(tool_args):
                 if prev == None:
                     prev_value_str = " [new]"
                     print "(!)",
-            print "\t\t- " + field + prev_value_str
+            print "\t" + each + ":" + field + prev_value_str
 
-    print "Files:"
+    print "\nFiles:"
     paths = None
     if len(args) == 0:
         paths = [""]

+ 14 - 2
mainline/tools/limit.py

@@ -17,7 +17,6 @@
 #    along with Metrix++.  If not, see <http://www.gnu.org/licenses/>.
 #
 
-
 import logging
 import re
 
@@ -45,11 +44,15 @@ def main(tool_args):
     log_plugin.declare_configuration(parser)
     db_plugin.declare_configuration(parser)
     warn_plugin.declare_configuration(parser)
+    parser.add_option("--general.hotspots", default=None, help="If not set (none), all exceeded limits are printed."
+                      " If set, exceeded limits are sorted (the worst is the first) and only first GENERAL.HOTSPOTS limits are printed."
+                      " [default: %default]", type=int)
 
     (options, args) = parser.parse_args(tool_args)
     log_plugin.configure(options)
     db_plugin.configure(options)
     warn_plugin.configure(options)
+    hotspots = options.__dict__['general.hotspots']
 
     loader_prev = core.db.loader.Loader()
     if db_plugin.dbfile_prev != None:
@@ -91,10 +94,19 @@ def main(tool_args):
             filters = [limit.filter]
             if modified_file_ids != None:
                 filters.append(('file_id', 'IN', modified_file_ids))
+            sort_by = None
+            limit_by = None
+            if hotspots != None:
+                sort_by = limit.field
+                if limit.type == "max":
+                    sort_by = "-" + sort_by
+                limit_by = hotspots
             selected_data = loader.load_selected_data(limit.namespace,
                                                    fields = [limit.field],
                                                    path=path,
-                                                   filters = filters)
+                                                   filters = filters,
+                                                   sort_by=sort_by,
+                                                   limit_by=limit_by)
             if selected_data == None:
                 logging.error("Specified path '" + path + "' is invalid (not found in the database records)")
                 exit_code += 1