
refactoring: consolidate the shared tool helpers (check_db_metadata, preprocess_path, report_bad_path) under the core package and remove mainline/tools/utils.py

avkonst committed 11 years ago, commit 5aa4bac2dc

mainline/core/db/utils.py (+21 -0)

@@ -17,6 +17,9 @@
 #    along with Metrix++.  If not, see <http://www.gnu.org/licenses/>.
 #
 
+import logging
+import re
+
 class FileRegionsMatcher(object):
 
     class FileRegionsDisposableGetter(object):
@@ -91,3 +94,21 @@ class FileRegionsMatcher(object):
 
     def is_modified(self, curr_id):
         return self.ids[curr_id][1]
+
+def check_db_metadata(loader, loader_prev):
+    for each in loader.iterate_properties():
+        prev = loader_prev.get_property(each.name)
+        if prev != each.value:
+            logging.warn("Previous data file has got different metadata:")
+            logging.warn(" - identification of change trends can be not reliable")
+            logging.warn(" - use 'info' tool to view more details")
+            return 1
+    return 0
+
+def preprocess_path(path):
+    path = re.sub(r'''[\\]+''', "/", path)
+    logging.info("Processing: " + path)
+    return path
+
+def report_bad_path(path):
+    logging.error("Specified path '" + path + "' is invalid: not found in the database records.")
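
For orientation, the pattern the tools below follow with these helpers looks roughly like this (a minimal sketch; args and loader come from each tool's own argument parsing and db plugin, and the tools import the helpers as core.utils):

    import core.utils

    exit_code = 0
    for path in args:
        # normalize backslashes to forward slashes and log the path
        path = core.utils.preprocess_path(path)
        data = loader.load_file_data(path)
        if data == None:
            # the path is not present in the database records
            core.utils.report_bad_path(path)
            exit_code += 1
            continue
        # ... dump / export / inspect 'data' ...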

mainline/tools/debug.py (+3 -3)

@@ -26,7 +26,7 @@ import core.log
 import core.cmdparser
 import core.db.post
 
-import tools.utils
+import core.utils
 
 class Tool(core.api.ITool):
     def run(self, tool_args):
@@ -60,11 +60,11 @@ def dumphtml(args, loader):
     result = ""
     result += '<html><body>'
     for path in args:
-        path = tools.utils.preprocess_path(path)
+        path = core.utils.preprocess_path(path)
         
         data = loader.load_file_data(path)
         if data == None:
-            tools.utils.report_bad_path(path)
+            core.utils.report_bad_path(path)
             exit_code += 1
             continue
         

mainline/tools/export.py (+3 -3)

@@ -27,7 +27,7 @@ import core.log
 import core.db.post
 import core.cmdparser
 
-import tools.utils
+import core.utils
 
 class Tool(core.api.ITool):
     def run(self, tool_args):
@@ -106,7 +106,7 @@ def export_to_stdout(out_format, paths, loader, loader_prev):
         assert False, "Unknown output format " + out_format
 
     for path in paths:
-        path = tools.utils.preprocess_path(path)
+        path = core.utils.preprocess_path(path)
         
         files = loader.iterate_file_data(path)
         if files != None:
@@ -121,7 +121,7 @@ def export_to_stdout(out_format, paths, loader, loader_prev):
                     per_file_data.append(file_data.get_data(column[0], column[1]))
                 csvWriter.writerow([file_data.get_path(), None] + per_file_data)
         else:
-            tools.utils.report_bad_path(path)
+            core.utils.report_bad_path(path)
             exit_code += 1
 
     if out_format == 'xml':

mainline/tools/info.py (+3 -3)

@@ -23,7 +23,7 @@ import core.db.post
 import core.log
 import core.cmdparser
 
-import tools.utils
+import core.utils
 
 class Tool(core.api.ITool):
     def run(self, tool_args):
@@ -86,11 +86,11 @@ def main(tool_args):
     else:
         paths = args
     for path in paths:
-        path = tools.utils.preprocess_path(path)
+        path = core.utils.preprocess_path(path)
 
         file_iterator = loader.iterate_file_data(path=path)
         if file_iterator == None:
-            tools.utils.report_bad_path(path)
+            core.utils.report_bad_path(path)
             exit_code += 1
             continue
         for each in file_iterator:

mainline/tools/limit.py (+6 -6)

@@ -21,12 +21,12 @@ import logging
 
 import core.log
 import core.db.post
-import core.db.utils
+import core.utils
 import core.cout
 import core.warn
 import core.cmdparser
 
-import tools.utils
+import core.utils
 
 import core.api
 class Tool(core.api.ITool):
@@ -73,7 +73,7 @@ def main(tool_args):
     
     # Check for versions consistency
     if db_plugin.dbfile_prev != None:
-        tools.utils.check_db_metadata(loader, loader_prev)
+        core.utils.check_db_metadata(loader, loader_prev)
     
     paths = None
     if len(args) == 0:
@@ -87,7 +87,7 @@ def main(tool_args):
         modified_file_ids = get_list_of_modified_files(loader, loader_prev)
         
     for path in paths:
-        path = tools.utils.preprocess_path(path)
+        path = core.utils.preprocess_path(path)
         
         for limit in warn_plugin.iterate_limits():
             logging.info("Applying limit: " + str(limit))
@@ -108,7 +108,7 @@ def main(tool_args):
                                                    sort_by=sort_by,
                                                    limit_by=limit_by)
             if selected_data == None:
-                tools.utils.report_bad_path(path)
+                core.utils.report_bad_path(path)
                 exit_code += 1
                 continue
             
@@ -122,7 +122,7 @@ def main(tool_args):
                         diff = 0
                         is_modified = False
                     else:
-                        matcher = core.db.utils.FileRegionsMatcher(file_data, file_data_prev)
+                        matcher = core.utils.FileRegionsMatcher(file_data, file_data_prev)
                         prev_id = matcher.get_prev_id(select_data.get_region().get_id())
                         if matcher.is_matched(select_data.get_region().get_id()):
                             if matcher.is_modified(select_data.get_region().get_id()):
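
The limit tool's per-region diff logic above relies on FileRegionsMatcher (now reached via core.utils) to map a region in the current file data onto its counterpart in the previous snapshot. Condensed to only the calls visible in the hunk (a sketch; file_data, file_data_prev and select_data come from the surrounding loop):

    matcher = core.utils.FileRegionsMatcher(file_data, file_data_prev)
    curr_id = select_data.get_region().get_id()
    prev_id = matcher.get_prev_id(curr_id)          # id of the matching region in the previous data
    if matcher.is_matched(curr_id):
        is_modified = matcher.is_modified(curr_id)  # matched region: was its content changed?
    else:
        pass  # no counterpart in the previous data; handled by the tool's own logic (not shown in the hunk)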

mainline/tools/utils.py (+0 -40)

@@ -1,40 +0,0 @@
-#
-#    Metrix++, Copyright 2009-2013, Metrix++ Project
-#    Link: http://metrixplusplus.sourceforge.net
-#    
-#    This file is a part of Metrix++ Tool.
-#    
-#    Metrix++ is free software: you can redistribute it and/or modify
-#    it under the terms of the GNU General Public License as published by
-#    the Free Software Foundation, version 3 of the License.
-#    
-#    Metrix++ is distributed in the hope that it will be useful,
-#    but WITHOUT ANY WARRANTY; without even the implied warranty of
-#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-#    GNU General Public License for more details.
-#    
-#    You should have received a copy of the GNU General Public License
-#    along with Metrix++.  If not, see <http://www.gnu.org/licenses/>.
-#
-import logging
-import re
-
-def check_db_metadata(loader, loader_prev):
-    for each in loader.iterate_properties():
-        prev = loader_prev.get_property(each.name)
-        if prev != each.value:
-            logging.warn("Previous data file has got different metadata:")
-            logging.warn(" - identification of change trends can be not reliable")
-            logging.warn(" - use 'info' tool to view more details")
-            return 1
-    return 0
-
-def preprocess_path(path):
-    path = re.sub(r'''[\\]+''', "/", path)
-    logging.info("Processing: " + path)
-    return path
-
-def report_bad_path(path):
-    logging.error("Specified path '" + path + "' is invalid: not found in the database records.")
-    
-    

mainline/tools/view.py (+10 -13)

@@ -20,11 +20,11 @@
 
 import core.log
 import core.db.post
-import core.db.utils
+import core.utils
 import core.cmdparser
 import core.export.convert
 
-import tools.utils
+import core.utils
 
 import core.api
 class Tool(core.api.ITool):
@@ -51,18 +51,15 @@ def main(tool_args):
     out_format = options.__dict__['format']
     nest_regions = options.__dict__['nest_regions']
 
-    loader_prev = core.api.Loader()
-    if db_plugin.dbfile_prev != None:
-        if loader_prev.open_database(db_plugin.dbfile_prev) == False:
-            parser.error("Can not open file: " + db_plugin.dbfile_prev)
+    log_plugin.initialize()
+    db_plugin.initialize()
 
-    loader = core.api.Loader()
-    if loader.open_database(db_plugin.dbfile) == False:
-        parser.error("Can not open file: " + db_plugin.dbfile)
+    loader_prev = db_plugin.get_loader_prev()
+    loader = db_plugin.get_loader()
 
     # Check for versions consistency
     if db_plugin.dbfile_prev != None:
-        tools.utils.check_db_metadata(loader, loader_prev)
+        core.utils.check_db_metadata(loader, loader_prev)
     
     paths = None
     if len(args) == 0:
@@ -85,7 +82,7 @@ def export_to_str(out_format, paths, loader, loader_prev, nest_regions):
         result += "{'export': ["
 
     for (ind, path) in enumerate(paths):
-        path = tools.utils.preprocess_path(path)
+        path = core.utils.preprocess_path(path)
         
         aggregated_data = loader.load_aggregated_data(path)
         aggregated_data_tree = {}
@@ -96,7 +93,7 @@ def export_to_str(out_format, paths, loader, loader_prev, nest_regions):
             subdirs = aggregated_data.get_subdirs()
             subfiles = aggregated_data.get_subfiles()
         else:
-            tools.utils.report_bad_path(path)
+            core.utils.report_bad_path(path)
             exit_code += 1
         aggregated_data_prev = loader_prev.load_aggregated_data(path)
         if aggregated_data_prev != None:
@@ -140,7 +137,7 @@ def append_regions(file_data_tree, file_data, file_data_prev, nest_regions):
     if file_data_prev != None:
         file_data_tree = append_diff(file_data_tree,
                                      file_data_prev.get_data_tree())
-        regions_matcher = core.db.utils.FileRegionsMatcher(file_data, file_data_prev)
+        regions_matcher = core.utils.FileRegionsMatcher(file_data, file_data_prev)
     
     if nest_regions == False:
         regions = []
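
The view tool no longer opens the database files by hand; like the other tools, it now obtains its loaders from the db plugin and then runs the shared metadata check. The new setup sequence, reduced to the calls shown in the hunk above:

    log_plugin.initialize()
    db_plugin.initialize()

    loader_prev = db_plugin.get_loader_prev()   # loader for the previous data file (if one was specified)
    loader = db_plugin.get_loader()             # loader for the current data file

    if db_plugin.dbfile_prev != None:
        # warn if the two data files carry different metadata (see core.utils.check_db_metadata above)
        core.utils.check_db_metadata(loader, loader_prev)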