Browse Source

Minor improvements in code.

avkonst 11 năm trước
mục cha
commit
a47046bbd8
28 tập tin đã thay đổi với 184 bổ sung và 136 xóa
  1. 43 31
      mainline/core/dir.py
  2. 21 9
      mainline/metrixpp.py
  3. 4 1
      mainline/tests/general/test_basic.py
  4. 4 4
      mainline/tests/general/test_basic/test_help_--help_default_stdout.gold.txt
  5. 0 0
      mainline/tests/general/test_basic/test_help_unknown_default_stdout.gold.txt
  6. 0 6
      mainline/tests/general/test_basic/test_help_view_default_stdout.gold.txt
  7. 1 1
      mainline/tests/general/test_basic/test_workflow_collect_default_stderr.gold.txt
  8. 1 1
      mainline/tests/general/test_basic/test_workflow_collect_second_stderr.gold.txt
  9. 2 1
      mainline/tests/general/test_basic/test_workflow_info_default_stderr.gold.txt
  10. 2 1
      mainline/tests/general/test_basic/test_workflow_info_second_stderr.gold.txt
  11. 1 1
      mainline/tests/general/test_basic/test_workflow_limit_default_stderr.gold.txt
  12. 1 1
      mainline/tests/general/test_basic/test_workflow_limit_second_stderr.gold.txt
  13. 1 1
      mainline/tests/general/test_basic/test_workflow_limit_second_warn_all_stderr.gold.txt
  14. 1 1
      mainline/tests/general/test_basic/test_workflow_limit_second_warn_new_stderr.gold.txt
  15. 1 1
      mainline/tests/general/test_basic/test_workflow_limit_second_warn_touched_stderr.gold.txt
  16. 1 1
      mainline/tests/general/test_basic/test_workflow_limit_second_warn_trend_stderr.gold.txt
  17. 1 1
      mainline/tests/general/test_basic/test_workflow_view_default_stderr.gold.txt
  18. 1 1
      mainline/tests/general/test_basic/test_workflow_view_second_per_file_stderr.gold.txt
  19. 1 1
      mainline/tests/general/test_basic/test_workflow_view_second_stderr.gold.txt
  20. 5 5
      mainline/tests/general/test_std_code_cpp/test_parser_view_files_stdout.gold.txt
  21. 3 3
      mainline/tests/general/test_std_code_cs/test_parser_view_files_stdout.gold.txt
  22. 4 4
      mainline/tests/general/test_std_code_java/test_parser_view_files_stdout.gold.txt
  23. 5 1
      mainline/tools/debug.py
  24. 5 6
      mainline/tools/export.py
  25. 5 3
      mainline/tools/info.py
  26. 6 11
      mainline/tools/limit.py
  27. 40 0
      mainline/tools/utils.py
  28. 24 39
      mainline/tools/view.py

+ 43 - 31
mainline/core/dir.py

@@ -80,43 +80,55 @@ class DirectoryReader():
     
     def run(self, plugin, directory):
         
+        def run_per_file(plugin, fname, full_path):
+            exit_code = 0
+            norm_path = re.sub(r'''[\\]''', "/", full_path)
+            if plugin.is_file_excluded(fname) == False:
+                if os.path.isdir(full_path):
+                    if plugin.non_recursively == False:
+                        exit_code += run_recursively(plugin, full_path)
+                else:
+                    parser = plugin.get_plugin_loader().get_parser(full_path)
+                    if parser == None:
+                        logging.info("Skipping: " + norm_path)
+                    else:
+                        logging.info("Processing: " + norm_path)
+                        ts = time.time()
+                        f = open(full_path, 'r');
+                        text = f.read();
+                        f.close()
+                        checksum = binascii.crc32(text) & 0xffffffff # to match python 3
+
+                        (data, is_updated) = plugin.get_plugin_loader().get_database_loader().create_file_data(norm_path, checksum, text)
+                        procerrors = parser.process(plugin, data, is_updated)
+                        if plugin.is_proctime_enabled == True:
+                            data.set_data('std.general', 'proctime', time.time() - ts)
+                        if plugin.is_procerrors_enabled == True and procerrors != None and procerrors != 0:
+                            data.set_data('std.general', 'procerrors', procerrors)
+                        if plugin.is_size_enabled == True:
+                            data.set_data('std.general', 'size', len(text))
+                        plugin.get_plugin_loader().get_database_loader().save_file_data(data)
+                        logging.debug("-" * 60)
+                        exit_code += procerrors
+            else:
+                logging.info("Excluding: " + norm_path)
+            return exit_code
+        
         def run_recursively(plugin, directory):
             exit_code = 0
             for fname in os.listdir(directory):
                 full_path = os.path.join(directory, fname)
-                norm_path = re.sub(r'''[\\]''', "/", full_path)
-                if plugin.is_file_excluded(fname) == False:
-                    if os.path.isdir(full_path):
-                        if plugin.non_recursively == False:
-                            exit_code += run_recursively(plugin, full_path)
-                    else:
-                        parser = plugin.get_plugin_loader().get_parser(full_path)
-                        if parser == None:
-                            logging.info("Skipping: " + norm_path)
-                        else:
-                            logging.info("Processing: " + norm_path)
-                            ts = time.time()
-                            f = open(full_path, 'r');
-                            text = f.read();
-                            f.close()
-                            checksum = binascii.crc32(text) & 0xffffffff # to match python 3
-    
-                            (data, is_updated) = plugin.get_plugin_loader().get_database_loader().create_file_data(norm_path, checksum, text)
-                            procerrors = parser.process(plugin, data, is_updated)
-                            if plugin.is_proctime_enabled == True:
-                                data.set_data('std.general', 'proctime', time.time() - ts)
-                            if plugin.is_procerrors_enabled == True and procerrors != None and procerrors != 0:
-                                data.set_data('std.general', 'procerrors', procerrors)
-                            if plugin.is_size_enabled == True:
-                                data.set_data('std.general', 'size', len(text))
-                            plugin.get_plugin_loader().get_database_loader().save_file_data(data)
-                            logging.debug("-" * 60)
-                            exit_code += procerrors
-                else:
-                    logging.info("Excluding: " + norm_path)
+                exit_code += run_per_file(plugin, fname, full_path)
             return exit_code
         
-        total_errors = run_recursively(plugin, directory)
+        if os.path.exists(directory) == False:
+            logging.error("Skipping (does not exist): " + directory)
+            return 1
+        
+        if os.path.isdir(directory):
+            total_errors = run_recursively(plugin, directory)
+        else:
+            total_errors = run_per_file(plugin, os.path.basename(directory), directory)
         total_errors = total_errors # used, warnings are per file if not zero
         return 0 # ignore errors, collection is successful anyway
     

+ 21 - 9
mainline/metrixpp.py

@@ -31,14 +31,19 @@ def main():
     
     os.environ['METRIXPLUSPLUS_INSTALL_DIR'] = os.path.dirname(os.path.abspath(__file__))
     
+    this_file = os.path.basename(__file__)
+    
     available_tools = []
+    excluded_tools = ['utils']
+    internal_tools = ['debug', 'test']
     for fname in os.listdir(os.path.join(os.environ['METRIXPLUSPLUS_INSTALL_DIR'], 'tools')):
         tool_name = os.path.splitext(fname)[0]
         if tool_name == '__init__':
             continue
         if tool_name not in available_tools:
-            available_tools.append(tool_name)
-
+            if tool_name not in excluded_tools + internal_tools:
+                available_tools.append(tool_name)
+    
     exemode = None
     if len(sys.argv[1:]) != 0:
         exemode = sys.argv[1]
@@ -51,14 +56,21 @@ def main():
     if len(sys.argv[1:]) > 1:
         command = sys.argv[2]
         
-    if command not in available_tools:
-        logging.error("Unknown action: " + str(command))
-        print "Usage: {prog} <action> --help".format(prog=__file__)
-        print "   or: {prog} <action> [options] -- [path 1] ... [path N]".format(prog=__file__)
+    if command == '--help' or command == '-h' or command == '--h':
+        print "Usage: python {prog} <action> --help".format(prog=this_file)
+        print "   or: python {prog} <action> [options] -- [path 1] ... [path N]".format(prog=this_file)
         print "where: actions are:"
-        for each in available_tools:
+        for each in sorted(available_tools):
             print "\t" + each
-        return 1
+        if exemode == '-D':
+            for each in sorted(internal_tools):
+                print "\t" + each + "\t[internal]"
+        exit(0)
+        
+    if command not in available_tools + internal_tools:
+        print >> sys.stderr, "Usage: python {prog} --help\n".format(prog=this_file)
+        print >> sys.stderr, "{prog}: error: no such action: {action}".format(prog=this_file, action=command)
+        exit(1)
 
     tool = __import__('tools', globals(), locals(), [command], -1)
     module_attr = tool.__getattribute__(command)
@@ -75,5 +87,5 @@ if __name__ == '__main__':
     if 'METRIXPLUSPLUS_TEST_GENERATE_GOLDS' in os.environ.keys() and \
         os.environ['METRIXPLUSPLUS_TEST_GENERATE_GOLDS'] == "True":
         time_spent = 1 # Constant value if under tests
-    logging.warning("Exit code: " + str(exit_code) + ". Time spent: " + str(time_spent) + " seconds. Done")
+    logging.warning("Done (" + str(time_spent) +" seconds). Exit code: " + str(exit_code))
     exit(exit_code)

+ 4 - 1
mainline/tests/general/test_basic.py

@@ -135,7 +135,10 @@ class Test(tests.common.TestCase):
 
     def test_help(self):
         
-        runner = tests.common.ToolRunner('--help', exit_code=1)
+        runner = tests.common.ToolRunner('--help')
+        self.assertExec(runner.run())
+
+        runner = tests.common.ToolRunner('unknown', exit_code=1)
         self.assertExec(runner.run())
 
         runner = tests.common.ToolRunner('collect', ['--help'])

+ 4 - 4
mainline/tests/general/test_basic/test_help_--help_default_stdout.gold.txt

@@ -1,10 +1,10 @@
-Usage: C:\Projects\Metrix++\mainline\metrixpp.py <action> --help
-   or: C:\Projects\Metrix++\mainline\metrixpp.py <action> [options] -- [path 1] ... [path N]
+Usage: python metrixpp.py <action> --help
+   or: python metrixpp.py <action> [options] -- [path 1] ... [path N]
 where: actions are:
 	collect
-	debug
 	export
 	info
 	limit
-	test
 	view
+	debug	[internal]
+	test	[internal]

+ 0 - 0
mainline/tests/general/test_basic/test_help_unknown_default_stdout.gold.txt


+ 0 - 6
mainline/tests/general/test_basic/test_help_view_default_stdout.gold.txt

@@ -20,12 +20,6 @@ Options:
   --format=FORMAT, --ft=FORMAT
                         Format of the output data. Possible values are 'xml',
                         'txt' or 'python' [default: xml]
-  --namespaces=NAMESPACES, --ns=NAMESPACES
-                        Allows to enumerate namespaces of interest. If not
-                        defined all namespaces available in database file will
-                        be processed. Separate several namespaces by comma,
-                        for example 'general,std.code.complexity' [default:
-                        none]
   --nest-regions, --nr  If the option is set (True), data for regions is
                         exported in the form of a tree. Otherwise, all regions
                         are exported in plain list. [default: False]

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_collect_default_stderr.gold.txt

@@ -2,4 +2,4 @@
 [LOG]: INFO:	Excluding: ./.unused.cpp
 [LOG]: INFO:	Skipping: ./dummy.txt
 [LOG]: INFO:	Processing: ./simple.cpp
-[LOG]: WARNING:	Exit code: 0. Time spent: 1 seconds. Done
+[LOG]: WARNING:	Done (1 seconds). Exit code: 0

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_collect_second_stderr.gold.txt

@@ -2,4 +2,4 @@
 [LOG]: INFO:	Excluding: ./.unused.cpp
 [LOG]: INFO:	Processing: ./simple.cpp
 [LOG]: INFO:	Processing: ./simple2.cpp
-[LOG]: WARNING:	Exit code: 0. Time spent: 1 seconds. Done
+[LOG]: WARNING:	Done (1 seconds). Exit code: 0

+ 2 - 1
mainline/tests/general/test_basic/test_workflow_info_default_stderr.gold.txt

@@ -1,2 +1,3 @@
 [LOG]: WARNING:	Logging enabled with INFO level
-[LOG]: WARNING:	Exit code: 0. Time spent: 1 seconds. Done
+[LOG]: INFO:	Processing: 
+[LOG]: WARNING:	Done (1 seconds). Exit code: 0

+ 2 - 1
mainline/tests/general/test_basic/test_workflow_info_second_stderr.gold.txt

@@ -1,2 +1,3 @@
 [LOG]: WARNING:	Logging enabled with INFO level
-[LOG]: WARNING:	Exit code: 0. Time spent: 1 seconds. Done
+[LOG]: INFO:	Processing: 
+[LOG]: WARNING:	Done (1 seconds). Exit code: 0

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_limit_default_stderr.gold.txt

@@ -1,4 +1,4 @@
 [LOG]: WARNING:	Logging enabled with INFO level
 [LOG]: INFO:	Processing: 
 [LOG]: INFO:	Applying limit: namespace 'std.code.complexity', filter '('cyclomatic', '>', 0.0)'
-[LOG]: WARNING:	Exit code: 4. Time spent: 1 seconds. Done
+[LOG]: WARNING:	Done (1 seconds). Exit code: 4

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_limit_second_stderr.gold.txt

@@ -1,4 +1,4 @@
 [LOG]: WARNING:	Logging enabled with INFO level
 [LOG]: INFO:	Processing: 
 [LOG]: INFO:	Applying limit: namespace 'std.code.complexity', filter '('cyclomatic', '>', 0.0)'
-[LOG]: WARNING:	Exit code: 6. Time spent: 1 seconds. Done
+[LOG]: WARNING:	Done (1 seconds). Exit code: 6

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_limit_second_warn_all_stderr.gold.txt

@@ -1,4 +1,4 @@
 [LOG]: WARNING:	Logging enabled with INFO level
 [LOG]: INFO:	Processing: 
 [LOG]: INFO:	Applying limit: namespace 'std.code.complexity', filter '('cyclomatic', '>', 0.0)'
-[LOG]: WARNING:	Exit code: 6. Time spent: 1 seconds. Done
+[LOG]: WARNING:	Done (1 seconds). Exit code: 6

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_limit_second_warn_new_stderr.gold.txt

@@ -2,4 +2,4 @@
 [LOG]: INFO:	Identifying changed files...
 [LOG]: INFO:	Processing: 
 [LOG]: INFO:	Applying limit: namespace 'std.code.complexity', filter '('cyclomatic', '>', 0.0)'
-[LOG]: WARNING:	Exit code: 2. Time spent: 1 seconds. Done
+[LOG]: WARNING:	Done (1 seconds). Exit code: 2

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_limit_second_warn_touched_stderr.gold.txt

@@ -2,4 +2,4 @@
 [LOG]: INFO:	Identifying changed files...
 [LOG]: INFO:	Processing: 
 [LOG]: INFO:	Applying limit: namespace 'std.code.complexity', filter '('cyclomatic', '>', 0.0)'
-[LOG]: WARNING:	Exit code: 4. Time spent: 1 seconds. Done
+[LOG]: WARNING:	Done (1 seconds). Exit code: 4

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_limit_second_warn_trend_stderr.gold.txt

@@ -2,4 +2,4 @@
 [LOG]: INFO:	Identifying changed files...
 [LOG]: INFO:	Processing: 
 [LOG]: INFO:	Applying limit: namespace 'std.code.complexity', filter '('cyclomatic', '>', 0.0)'
-[LOG]: WARNING:	Exit code: 3. Time spent: 1 seconds. Done
+[LOG]: WARNING:	Done (1 seconds). Exit code: 3

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_view_default_stderr.gold.txt

@@ -1,3 +1,3 @@
 [LOG]: WARNING:	Logging enabled with INFO level
 [LOG]: INFO:	Processing: 
-[LOG]: WARNING:	Exit code: 0. Time spent: 1 seconds. Done
+[LOG]: WARNING:	Done (1 seconds). Exit code: 0

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_view_second_per_file_stderr.gold.txt

@@ -1,3 +1,3 @@
 [LOG]: WARNING:	Logging enabled with INFO level
 [LOG]: INFO:	Processing: ./simple.cpp
-[LOG]: WARNING:	Exit code: 0. Time spent: 1 seconds. Done
+[LOG]: WARNING:	Done (1 seconds). Exit code: 0

+ 1 - 1
mainline/tests/general/test_basic/test_workflow_view_second_stderr.gold.txt

@@ -1,3 +1,3 @@
 [LOG]: WARNING:	Logging enabled with INFO level
 [LOG]: INFO:	Processing: 
-[LOG]: WARNING:	Exit code: 0. Time spent: 1 seconds. Done
+[LOG]: WARNING:	Done (1 seconds). Exit code: 0

+ 5 - 5
mainline/tests/general/test_std_code_cpp/test_parser_view_files_stdout.gold.txt

@@ -5,7 +5,7 @@ ________________________________________________________________________________
 --------------------------------------------------------------------------------
 data:  
 .   info: 
-.   .   path=".\function_ends_on_class.cpp"
+.   .   path="./function_ends_on_class.cpp"
 .   .   id="1"
 .   file-data:  
 .   .   regions:
@@ -101,7 +101,7 @@ data:
 --------------------------------------------------------------------------------
 data:  
 .   info: 
-.   .   path=".\operator_test.hpp"
+.   .   path="./operator_test.hpp"
 .   .   id="2"
 .   file-data:  
 .   .   regions:
@@ -831,7 +831,7 @@ data:
 --------------------------------------------------------------------------------
 data:  
 .   info: 
-.   .   path=".\test.c"
+.   .   path="./test.c"
 .   .   id="3"
 .   file-data:  
 .   .   regions:
@@ -1254,7 +1254,7 @@ data:
 --------------------------------------------------------------------------------
 data:  
 .   info: 
-.   .   path=".\test2.cpp"
+.   .   path="./test2.cpp"
 .   .   id="4"
 .   file-data:  
 .   .   regions:
@@ -1373,7 +1373,7 @@ data:
 --------------------------------------------------------------------------------
 data:  
 .   info: 
-.   .   path=".\test3.cpp"
+.   .   path="./test3.cpp"
 .   .   id="5"
 .   file-data:  
 .   .   regions:

+ 3 - 3
mainline/tests/general/test_std_code_cs/test_parser_view_files_stdout.gold.txt

@@ -5,7 +5,7 @@ ________________________________________________________________________________
 --------------------------------------------------------------------------------
 data:  
 .   info: 
-.   .   path=".\File.cs"
+.   .   path="./File.cs"
 .   .   id="1"
 .   file-data:  
 .   .   regions:
@@ -373,7 +373,7 @@ data:
 --------------------------------------------------------------------------------
 data:  
 .   info: 
-.   .   path=".\Generics.cs"
+.   .   path="./Generics.cs"
 .   .   id="2"
 .   file-data:  
 .   .   regions:
@@ -598,7 +598,7 @@ data:
 --------------------------------------------------------------------------------
 data:  
 .   info: 
-.   .   path=".\interface.cs"
+.   .   path="./interface.cs"
 .   .   id="3"
 .   file-data:  
 .   .   regions:

+ 4 - 4
mainline/tests/general/test_std_code_java/test_parser_view_files_stdout.gold.txt

@@ -5,7 +5,7 @@ ________________________________________________________________________________
 --------------------------------------------------------------------------------
 data:  
 .   info: 
-.   .   path=".\ArrayUtils.java"
+.   .   path="./ArrayUtils.java"
 .   .   id="1"
 .   file-data:  
 .   .   regions:
@@ -687,7 +687,7 @@ data:
 --------------------------------------------------------------------------------
 data:  
 .   info: 
-.   .   path=".\attrs.java"
+.   .   path="./attrs.java"
 .   .   id="2"
 .   file-data:  
 .   .   regions:
@@ -751,7 +751,7 @@ data:
 --------------------------------------------------------------------------------
 data:  
 .   info: 
-.   .   path=".\BinaryHeapPriorityQueue.java"
+.   .   path="./BinaryHeapPriorityQueue.java"
 .   .   id="3"
 .   file-data:  
 .   .   regions:
@@ -1452,7 +1452,7 @@ data:
 --------------------------------------------------------------------------------
 data:  
 .   info: 
-.   .   path=".\Generics.java"
+.   .   path="./Generics.java"
 .   .   id="4"
 .   file-data:  
 .   .   regions:

+ 5 - 1
mainline/tools/debug.py

@@ -26,6 +26,8 @@ import core.cmdparser
 import core.db.post
 import core.db.loader
 
+import tools.utils
+
 import core.api
 class Tool(core.api.ITool):
     def run(self, tool_args):
@@ -58,9 +60,11 @@ def dumphtml(args, loader):
     result = ""
     result += '<html><body>'
     for path in args:
+        path = tools.utils.preprocess_path(path)
+        
         data = loader.load_file_data(path)
         if data == None:
-            logging.error("Specified path '" + path + "' is invalid (not found in the database records)")
+            tools.utils.report_bad_path(path)
             exit_code += 1
             continue
         

+ 5 - 6
mainline/tools/export.py

@@ -20,15 +20,14 @@
 
 
 import logging
-import re
 import csv
 
 import core.log
 import core.db.loader
 import core.db.post
-import core.db.utils
 import core.cmdparser
-import core.export.convert
+
+import tools.utils
 
 import core.api
 class Tool(core.api.ITool):
@@ -104,8 +103,8 @@ def export_to_stdout(out_format, paths, loader, loader_prev):
     else:
         assert False, "Unknown output format " + out_format
 
-    for (ind, path) in enumerate(paths):
-        logging.info("Processing: " + re.sub(r'''[\\]''', "/", path))
+    for path in paths:
+        path = tools.utils.preprocess_path(path)
         
         files = loader.iterate_file_data(path)
         if files != None:
@@ -120,7 +119,7 @@ def export_to_stdout(out_format, paths, loader, loader_prev):
                     per_file_data.append(file_data.get_data(column[0], column[1]))
                 csvWriter.writerow([file_data.get_path(), None] + per_file_data)
         else:
-            logging.error("Specified path '" + path + "' is invalid (not found in the database records)")
+            tools.utils.report_bad_path(path)
             exit_code += 1
 
     if out_format == 'xml':

+ 5 - 3
mainline/tools/info.py

@@ -18,13 +18,13 @@
 #
 
 
-import logging
-
 import core.db.loader
 import core.db.post
 import core.log
 import core.cmdparser
 
+import tools.utils
+
 import core.api
 class Tool(core.api.ITool):
     def run(self, tool_args):
@@ -84,9 +84,11 @@ def main(tool_args):
     else:
         paths = args
     for path in paths:
+        path = tools.utils.preprocess_path(path)
+
         file_iterator = loader.iterate_file_data(path=path)
         if file_iterator == None:
-            logging.error("Specified path '" + path + "' is invalid (not found in the database records)")
+            tools.utils.report_bad_path(path)
             exit_code += 1
             continue
         for each in file_iterator:

+ 6 - 11
mainline/tools/limit.py

@@ -18,7 +18,6 @@
 #
 
 import logging
-import re
 
 import core.log
 import core.db.loader
@@ -28,6 +27,8 @@ import core.cout
 import core.warn
 import core.cmdparser
 
+import tools.utils
+
 import core.api
 class Tool(core.api.ITool):
     def run(self, tool_args):
@@ -70,14 +71,8 @@ def main(tool_args):
         warn_plugin.verify_fields(each, loader.get_namespace(each).iterate_field_names())
     
     # Check for versions consistency
-    for each in loader.iterate_properties():
-        if db_plugin.dbfile_prev != None:
-            prev = loader_prev.get_property(each.name)
-            if prev != each.value:
-                logging.warn("Previous data has got different metadata:")
-                logging.warn(" - identification of change trends can be not reliable")
-                logging.warn(" - use 'info' tool to get more details")
-                break
+    if db_plugin.dbfile_prev != None:
+        tools.utils.check_db_metadata(loader, loader_prev)
     
     paths = None
     if len(args) == 0:
@@ -91,7 +86,7 @@ def main(tool_args):
         modified_file_ids = get_list_of_modified_files(loader, loader_prev)
         
     for path in paths:
-        logging.info("Processing: " + re.sub(r'''[\\]''', "/", path))
+        path = tools.utils.preprocess_path(path)
         
         for limit in warn_plugin.iterate_limits():
             logging.info("Applying limit: " + str(limit))
@@ -112,7 +107,7 @@ def main(tool_args):
                                                    sort_by=sort_by,
                                                    limit_by=limit_by)
             if selected_data == None:
-                logging.error("Specified path '" + path + "' is invalid (not found in the database records)")
+                tools.utils.report_bad_path(path)
                 exit_code += 1
                 continue
             

+ 40 - 0
mainline/tools/utils.py

@@ -0,0 +1,40 @@
+#
+#    Metrix++, Copyright 2009-2013, Metrix++ Project
+#    Link: http://metrixplusplus.sourceforge.net
+#    
+#    This file is a part of Metrix++ Tool.
+#    
+#    Metrix++ is free software: you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation, version 3 of the License.
+#    
+#    Metrix++ is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+#    GNU General Public License for more details.
+#    
+#    You should have received a copy of the GNU General Public License
+#    along with Metrix++.  If not, see <http://www.gnu.org/licenses/>.
+#
+import logging
+import re
+
+def check_db_metadata(loader, loader_prev):
+    for each in loader.iterate_properties():
+        prev = loader_prev.get_property(each.name)
+        if prev != each.value:
+            logging.warn("Previous data file has got different metadata:")
+            logging.warn(" - identification of change trends can be not reliable")
+            logging.warn(" - use 'info' tool to view more details")
+            return 1
+    return 0
+
+def preprocess_path(path):
+    path = re.sub(r'''[\\]+''', "/", path)
+    logging.info("Processing: " + path)
+    return path
+
+def report_bad_path(path):
+    logging.error("Specified path '" + path + "' is invalid: not found in the database records.")
+    
+    

+ 24 - 39
mainline/tools/view.py

@@ -18,10 +18,6 @@
 #
 
 
-
-import logging
-import re
-
 import core.log
 import core.db.loader
 import core.db.post
@@ -29,6 +25,8 @@ import core.db.utils
 import core.cmdparser
 import core.export.convert
 
+import tools.utils
+
 import core.api
 class Tool(core.api.ITool):
     def run(self, tool_args):
@@ -44,10 +42,6 @@ def main(tool_args):
     db_plugin.declare_configuration(parser)
     parser.add_option("--format", "--ft", default='xml', choices=['txt', 'xml', 'python'], help="Format of the output data. "
                       "Possible values are 'xml', 'txt' or 'python' [default: %default]")
-    parser.add_option("--namespaces", "--ns", default=None, help="Allows to enumerate namespaces of interest."
-                      " If not defined all namespaces available in database file will be processed."
-                      " Separate several namespaces by comma, for example 'general,std.code.complexity'"
-                      " [default: %default]")
     parser.add_option("--nest-regions", "--nr", action="store_true", default=False,
                       help="If the option is set (True), data for regions is exported in the form of a tree. "
                       "Otherwise, all regions are exported in plain list. [default: %default]")
@@ -57,9 +51,6 @@ def main(tool_args):
     db_plugin.configure(options)
     out_format = options.__dict__['format']
     nest_regions = options.__dict__['nest_regions']
-    namespaces = None
-    if options.__dict__['namespaces'] != None:
-        namespaces = re.split(',', options.__dict__['namespaces'])
 
     loader_prev = core.db.loader.Loader()
     if db_plugin.dbfile_prev != None:
@@ -69,26 +60,20 @@ def main(tool_args):
     loader.open_database(db_plugin.dbfile)
     
     # Check for versions consistency
-    for each in loader.iterate_properties():
-        if db_plugin.dbfile_prev != None:
-            prev = loader_prev.get_property(each.name)
-            if prev != each.value:
-                logging.warn("Previous data has got different metadata:")
-                logging.warn(" - identification of change trends can be not reliable")
-                logging.warn(" - use 'info' tool to get more details")
-                break
+    if db_plugin.dbfile_prev != None:
+        tools.utils.check_db_metadata(loader, loader_prev)
     
     paths = None
     if len(args) == 0:
         paths = [""]
     else:
         paths = args
-        
-    (result, exit_code) = export_to_str(out_format, paths, loader, loader_prev, namespaces, nest_regions)
+    
+    (result, exit_code) = export_to_str(out_format, paths, loader, loader_prev, nest_regions)
     print result
     return exit_code
 
-def export_to_str(out_format, paths, loader, loader_prev, namespaces, nest_regions):
+def export_to_str(out_format, paths, loader, loader_prev, nest_regions):
     exit_code = 0
     result = ""
     if out_format == 'txt':
@@ -99,30 +84,30 @@ def export_to_str(out_format, paths, loader, loader_prev, namespaces, nest_regio
         result += "{'export': ["
 
     for (ind, path) in enumerate(paths):
-        logging.info("Processing: " + re.sub(r'''[\\]''', "/", path))
+        path = tools.utils.preprocess_path(path)
         
-        aggregated_data = loader.load_aggregated_data(path, namespaces=namespaces)
+        aggregated_data = loader.load_aggregated_data(path)
         aggregated_data_tree = {}
         subdirs = []
         subfiles = []
         if aggregated_data != None:
-            aggregated_data_tree = aggregated_data.get_data_tree(namespaces=namespaces)
+            aggregated_data_tree = aggregated_data.get_data_tree()
             subdirs = aggregated_data.get_subdirs()
             subfiles = aggregated_data.get_subfiles()
         else:
-            logging.error("Specified path '" + path + "' is invalid (not found in the database records)")
+            tools.utils.report_bad_path(path)
             exit_code += 1
-        aggregated_data_prev = loader_prev.load_aggregated_data(path, namespaces=namespaces)
+        aggregated_data_prev = loader_prev.load_aggregated_data(path)
         if aggregated_data_prev != None:
             aggregated_data_tree = append_diff(aggregated_data_tree,
-                                           aggregated_data_prev.get_data_tree(namespaces=namespaces))
+                                           aggregated_data_prev.get_data_tree())
         
         file_data = loader.load_file_data(path)
         file_data_tree = {}
         if file_data != None:
-            file_data_tree = file_data.get_data_tree(namespaces=namespaces) 
+            file_data_tree = file_data.get_data_tree() 
             file_data_prev = loader_prev.load_file_data(path)
-            append_regions(file_data_tree, file_data, file_data_prev, namespaces, nest_regions)
+            append_regions(file_data_tree, file_data, file_data_prev, nest_regions)
         
         data = {"info": {"path": path, "id": ind + 1},
                 "aggregated-data": aggregated_data_tree,
@@ -149,21 +134,21 @@ def export_to_str(out_format, paths, loader, loader_prev, namespaces, nest_regio
         
     return (result, exit_code)
 
-def append_regions(file_data_tree, file_data, file_data_prev, namespaces, nest_regions):
+def append_regions(file_data_tree, file_data, file_data_prev, nest_regions):
     regions_matcher = None
     if file_data_prev != None:
         file_data_tree = append_diff(file_data_tree,
-                                     file_data_prev.get_data_tree(namespaces=namespaces))
+                                     file_data_prev.get_data_tree())
         regions_matcher = core.db.utils.FileRegionsMatcher(file_data, file_data_prev)
     
     if nest_regions == False:
         regions = []
         for region in file_data.iterate_regions():
-            region_data_tree = region.get_data_tree(namespaces=namespaces)
+            region_data_tree = region.get_data_tree()
             if regions_matcher != None and regions_matcher.is_matched(region.get_id()):
                 region_data_prev = file_data_prev.get_region(regions_matcher.get_prev_id(region.get_id()))
                 region_data_tree = append_diff(region_data_tree,
-                                               region_data_prev.get_data_tree(namespaces=namespaces))
+                                               region_data_prev.get_data_tree())
             regions.append({"info": {"name" : region.name,
                                      'type' : file_data.get_region_types()().to_str(region.get_type()),
                                      "cursor" : region.cursor,
@@ -174,13 +159,13 @@ def append_regions(file_data_tree, file_data, file_data_prev, namespaces, nest_r
                             "data": region_data_tree})
         file_data_tree['regions'] = regions
     else:
-        def append_rec(region_id, file_data_tree, file_data, file_data_prev, namespaces):
+        def append_rec(region_id, file_data_tree, file_data, file_data_prev):
             region = file_data.get_region(region_id)
-            region_data_tree = region.get_data_tree(namespaces=namespaces)
+            region_data_tree = region.get_data_tree()
             if regions_matcher != None and regions_matcher.is_matched(region.get_id()):
                 region_data_prev = file_data_prev.get_region(regions_matcher.get_prev_id(region.get_id()))
                 region_data_tree = append_diff(region_data_tree,
-                                               region_data_prev.get_data_tree(namespaces=namespaces))
+                                               region_data_prev.get_data_tree())
             result = {"info": {"name" : region.name,
                                'type' : file_data.get_region_types()().to_str(region.get_type()),
                                "cursor" : region.cursor,
@@ -191,10 +176,10 @@ def append_regions(file_data_tree, file_data, file_data_prev, namespaces, nest_r
                       "data": region_data_tree,
                       "subregions": []}
             for sub_id in file_data.get_region(region_id).iterate_subregion_ids():
-                result['subregions'].append(append_rec(sub_id, file_data_tree, file_data, file_data_prev, namespaces))
+                result['subregions'].append(append_rec(sub_id, file_data_tree, file_data, file_data_prev))
             return result
         file_data_tree['regions'] = []
-        file_data_tree['regions'].append(append_rec(1, file_data_tree, file_data, file_data_prev, namespaces))
+        file_data_tree['regions'].append(append_rec(1, file_data_tree, file_data, file_data_prev))
 
 def append_diff(main_tree, prev_tree):
     assert(main_tree != None)