
added overview table and file + region tables
rethought database loader usage

prozessorkern, 5 years ago
commit 60f91cd7a7
1 changed file with 87 additions and 40 deletions:
  1. ext/std/tools/report.py (+87, -40)
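
For orientation before the diff: after this change, run() builds a per-path structure split into a file-level block and a region-level block, plus a cross-file overview passed separately to create_doxygen_report(). A minimal sketch of the intended shape, with hypothetical metric names, paths, and values that are not taken from this commit:

```python
# Hypothetical shape of the structures built in run(); the metric fields
# ("std.code.lines.total", "std.code.complexity.cyclomatic"), the path and
# all values are invented here purely for illustration.
data = {
    "src/example.cpp": {
        "file_data":     {"std.code.lines.total": 120},           # per-file metric values
        "file_fields":   ["std.code.lines.total"],                # column order of the file table
        "file_matrix":   [[120]],                                 # single row handed to the table writer
        "regions":       {"foo": {"std.code.complexity.cyclomatic": 3}},
        "region_fields": ["region", "std.code.complexity.cyclomatic"],
        "region_matrix": [["foo", 3]],   # one row per region, "-" where a metric is missing;
                                         # create_doxygen_report() later prefixes "#" to region names
    },
}

overview_data = {
    "fields": ["file", "std.code.lines.total"],   # union of file_fields across all paths
    "matrix": [["src/example.cpp", 120]],         # one row per file; create_doxygen_report() later prefixes "\ref"
}
```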

+87 -40   ext/std/tools/report.py

@@ -49,7 +49,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
             subdirs, subfiles = self.loadSubdirs(loader, subdir, subdirs, subfiles)
         return subdirs, subfiles

-    def create_doxygen_report(self, paths, output_dir, data, loader, loader_prev):
+    def create_doxygen_report(self, paths, output_dir, overview_data, data, loader, loader_prev):

         exit_code = 1

@@ -59,17 +59,41 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
                 file.write("/* this file is autogenerated by metrixpp - changes will be overwritten */\n")
                 file.write("/*!\n")

+                file.write("\\page metrics Metrics overview\n\n")
+
+                for file_data in overview_data["matrix"]:
+                    file_data[0] = "\\ref " + file_data[0]
+
+                writer = pytablewriter.MarkdownTableWriter()
+                writer.table_name = "metrics overview"
+                writer.headers = overview_data["fields"]
+                writer.value_matrix = overview_data["matrix"]
+                writer.stream = file
+                writer.write_table()
+
+                file.write("\n\n")
+
                 for path in paths:
-                    for region in data[path]["matrix"]:
-                        if region[0] != "-" and region[0] != "__global__":
-                            region[0] = "#" + region[0]

-                    file.write("\\file {}\n".format(path))
+                    file.write("\\file {}\n\n".format(path))

                     writer = pytablewriter.MarkdownTableWriter()
                     writer.table_name = "metrics"
-                    writer.headers = data[path]["fields"]
-                    writer.value_matrix = data[path]["matrix"]
+                    writer.headers = data[path]["file_fields"]
+                    writer.value_matrix = data[path]["file_matrix"]
+                    writer.stream = file
+                    writer.write_table()
+
+                    file.write("\n")
+
+                    for region in data[path]["region_matrix"]:
+                        if region[0] != "-" and region[0] != "__global__":
+                            region[0] = "#" + region[0]
+
+                    writer = pytablewriter.MarkdownTableWriter()
+                    writer.table_name = "region metrics"
+                    writer.headers = data[path]["region_fields"]
+                    writer.value_matrix = data[path]["region_matrix"]
                     writer.stream = file
                     writer.write_table()

@@ -86,6 +110,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
         exit_code = 0

         data = {}
+        overview_data = {}

         loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
         loader = self.get_plugin('mpp.dbf').get_loader()
@@ -98,49 +123,71 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):

         for path in paths:
             path = mpp.utils.preprocess_path(path)
-            fields = ["region"]
+            file_fields = ["region"]
             data[path] = {}
-            data[path]["matrix"] = [["-"]]
+            data[path]["file_data"] = {}
+            data[path]["file_fields"] = []
+            data[path]["file_matrix"] = [[]]
+            data[path]["regions"] = {}
+            data[path]["region_fields"] = ["region"]
+            data[path]["region_matrix"] = []

-            for file_data in loader.iterate_file_data(path):
-                file_data.load_regions()
-                for region in file_data.regions:
-                    data[path]["matrix"].append([region.name])
-
-            aggregated_data = loader.load_aggregated_data(path)
             file_data = loader.load_file_data(path)

-            for (i, namespace) in enumerate(aggregated_data.iterate_namespaces()):
-                for field in aggregated_data.iterate_fields(namespace):
-                    fields.append(namespace + "." + str(field[0]))
+            # load file based data
+            data_tree = file_data.get_data_tree()
+            for namespace in file_data.iterate_namespaces():
+                for field in file_data.iterate_fields(namespace):
+                    data[path]["file_data"][namespace + "." +  field[0]] = field[1]
+                    data[path]["file_fields"].append(namespace + "." +  field[0])
+   
+            for field in data[path]["file_fields"]:
+                data[path]["file_matrix"][0].append(data[path]["file_data"][field])
+
+            # load region based data
+            file_data.load_regions()
+            for region in file_data.regions:
+                data[path]["regions"][region.name] = {}
+                data_tree = region.get_data_tree()
+                for namespace in region.iterate_namespaces():
+                    for field in region.iterate_fields(namespace):
+                        data[path]["regions"][region.name][namespace + "." +  field[0]] = field[1]
+
+                        if not (namespace + "." +  field[0]) in data[path]["region_fields"]:
+                            data[path]["region_fields"].append(namespace + "." +  field[0])

-                    selected_data = loader.load_selected_data(namespace,
-                                                        fields = [field[0]],
-                                                        path=path)
-                                                    
-                    # append file information - not in a region
-                    for select_data in selected_data:
-                        if not select_data.get_region():
-                            data[path]["matrix"][0].append(select_data.get_data(namespace, field[0]))
-                            break;
+            for region in data[path]["regions"]:
+                region_row = [region]
+                
+                for field in data[path]["region_fields"][1:]:
+                    if field in data[path]["regions"][region]:
+                        region_row.append(data[path]["regions"][region][field])
                     else:
-                        data[path]["matrix"][0].append("-")
-                    
-                    # append region data if any
-                    for region in data[path]["matrix"][1:]:
-                        for select_data in selected_data:
-                            if region == select_data.get_region():
-                                data[path]["matrix"][select_data.get_region().get_id()].append(select_data.get_data(namespace, field[0]))
-                                break
-                        else:
-                            region.append("-")
-                        
+                        region_row.append("-")
+                
+                data[path]["region_matrix"].append(region_row)
+
+            overview_data["fields"] = ["file"]
+            overview_data["matrix"] = []
+            for key, value in data.items():
+                for field in value["file_fields"]:
+                    if not field in overview_data["fields"]:
+                        overview_data["fields"].append(field)

-            data[path]["fields"] = fields
-        
+            for key, value in data.items():
+                row = [key]
+                for field in overview_data["fields"][1:]:
+                    if field in value["file_data"]:
+                        row.append(value["file_data"][field])
+                    else:
+                        row.append("-")
+                
+                overview_data["matrix"].append(row)
+                
         if self.out_format == "doxygen":
             exit_code = self.create_doxygen_report(paths,
                                                    self.out_dir,
+                                                   overview_data,
                                                    data,
                                                    loader,
                                                    loader_prev)
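
For reference, every table added above is emitted through pytablewriter's MarkdownTableWriter, using only the attributes that appear in the create_doxygen_report() hunk. A standalone sketch of that pattern writing one overview table into a Doxygen comment block; the output file name, the data values, and the closing */ are assumptions of this sketch, not part of the commit:

```python
import pytablewriter

# Placeholder data standing in for overview_data as built in run();
# field names and numbers are illustrative only.
overview_data = {
    "fields": ["file", "std.code.lines.total"],
    "matrix": [["\\ref src/example.cpp", 120]],
}

# "metrixpp.dox" is an assumed output file name for this sketch.
with open("metrixpp.dox", "w") as out:
    out.write("/* this file is autogenerated by metrixpp - changes will be overwritten */\n")
    out.write("/*!\n")
    out.write("\\page metrics Metrics overview\n\n")

    writer = pytablewriter.MarkdownTableWriter()
    writer.table_name = "metrics overview"
    writer.headers = overview_data["fields"]
    writer.value_matrix = overview_data["matrix"]
    writer.stream = out          # stream the Markdown table straight into the .dox file
    writer.write_table()

    out.write("\n\n*/\n")        # assumed close of the Doxygen comment block
```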