|
@@ -33,130 +33,120 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
|
|
|
def initialize(self):
|
|
|
super(Plugin, self).initialize()
|
|
|
|
|
|
- def run(self, args):
|
|
|
- return main(self, args)
|
|
|
+ def loadSubdirs(self, loader, path, subdirs, subfiles):
|
|
|
|
|
|
-def loadSubdirs(loader, path, subdirs, subfiles):
|
|
|
+ aggregated_data = loader.load_aggregated_data(path)
|
|
|
|
|
|
- aggregated_data = loader.load_aggregated_data(path)
|
|
|
+ if not aggregated_data:
|
|
|
+ return subdirs, subfiles
|
|
|
|
|
|
- if not aggregated_data:
|
|
|
- return subdirs, subfiles
|
|
|
+ for subfile in aggregated_data.get_subfiles():
|
|
|
+ subfiles.append(aggregated_data.path + "/" + subfile)
|
|
|
|
|
|
- for subfile in aggregated_data.get_subfiles():
|
|
|
- subfiles.append(aggregated_data.path + "/" + subfile)
|
|
|
+ for subdir in aggregated_data.get_subdirs():
|
|
|
+ subdir = aggregated_data.path + "/" + subdir
|
|
|
+ subdirs.append(subdir)
|
|
|
+ subdirs, subfiles = self.loadSubdirs(loader, subdir, subdirs, subfiles)
|
|
|
+ return subdirs, subfiles
|
|
|
|
|
|
- for subdir in aggregated_data.get_subdirs():
|
|
|
- subdir = aggregated_data.path + "/" + subdir
|
|
|
- subdirs.append(subdir)
|
|
|
- subdirs, subfiles = loadSubdirs(loader, subdir, subdirs, subfiles)
|
|
|
- return subdirs, subfiles
|
|
|
+ def create_doxygen_report(self, paths, output_dir, data, loader, loader_prev):
|
|
|
+
|
|
|
+ exit_code = 1
|
|
|
|
|
|
-def main(plugin, args):
|
|
|
+ if output_dir:
|
|
|
+ os.makedirs(output_dir, exist_ok=True)
|
|
|
+ with open(os.path.join(output_dir, "metrixpp.dox"), mode="w+") as file:
|
|
|
+ file.write("/* this file is autogenerated by metrixpp - changes will be overwritten */\n")
|
|
|
+ file.write("/*!\n")
|
|
|
|
|
|
- exit_code = 0
|
|
|
+ for path in paths:
|
|
|
+ for region in data[path]["matrix"]:
|
|
|
+ if region[0] != "-" and region[0] != "__global__":
|
|
|
+ region[0] = "#" + region[0]
|
|
|
|
|
|
- data = {"fileMetrixList" : {},
|
|
|
- "regionMetrixList" : [],
|
|
|
- "files" : []}
|
|
|
+ file.write("\\file {}\n".format(path))
|
|
|
+
|
|
|
+ writer = pytablewriter.MarkdownTableWriter()
|
|
|
+ writer.table_name = "metrics"
|
|
|
+ writer.headers = data[path]["fields"]
|
|
|
+ writer.value_matrix = data[path]["matrix"]
|
|
|
+ writer.stream = file
|
|
|
+ writer.write_table()
|
|
|
|
|
|
- loader_prev = plugin.get_plugin('mpp.dbf').get_loader_prev()
|
|
|
- loader = plugin.get_plugin('mpp.dbf').get_loader()
|
|
|
+ file.write("\n\n")
|
|
|
|
|
|
- paths = None
|
|
|
- if len(args) == 0:
|
|
|
- subdirs, paths = loadSubdirs(loader, ".", [], [])
|
|
|
- else:
|
|
|
- paths = args
|
|
|
+ file.write("*/\n")
|
|
|
+ exit_code = 0
|
|
|
+ else:
|
|
|
+ logging.error("no output directory set")
|
|
|
|
|
|
- for path in paths:
|
|
|
- path = mpp.utils.preprocess_path(path)
|
|
|
+ return exit_code
|
|
|
|
|
|
- aggregated_data = loader.load_aggregated_data(path)
|
|
|
- file_data = loader.load_file_data(path)
|
|
|
-
|
|
|
- for namespace in aggregated_data.iterate_namespaces():
|
|
|
- for field in aggregated_data.iterate_fields(namespace):
|
|
|
- print(field)
|
|
|
-
|
|
|
- for key in aggregated_data.data:
|
|
|
- if not key in data["fileMetrixList"]:
|
|
|
- metric = { "name" : key,
|
|
|
- "submetrics" : []}
|
|
|
- data["fileMetrixList"][key] = metric
|
|
|
- for subkey in aggregated_data.data[key]:
|
|
|
- if not subkey in data["fileMetrixList"][key]:
|
|
|
- data["fileMetrixList"][key]["submetrics"].append(subkey)
|
|
|
-
|
|
|
- file = {"path" : path,
|
|
|
- "file_id" : file_data.file_id,
|
|
|
- "regions" : [],
|
|
|
- "data" : aggregated_data.data}
|
|
|
-
|
|
|
- data["files"].append(file)
|
|
|
-
|
|
|
- for reg in file_data.iterate_regions():
|
|
|
- region = {"name" : reg.name,
|
|
|
- "region_id" : reg.region_id,
|
|
|
- "line_begin" : reg.line_begin,
|
|
|
- "data" : reg.get_data_tree()}
|
|
|
-
|
|
|
- file["regions"].append(region)
|
|
|
-
|
|
|
- for key in region["data"]:
|
|
|
- if not key in data["regionMetrixList"]:
|
|
|
- data["regionMetrixList"].append(key)
|
|
|
- for subkey in key:
|
|
|
- if not subkey in data["regionMetrixList"][key]:
|
|
|
- data["regionMetrixList"][key].append(subkey)
|
|
|
-
|
|
|
- writer = pytablewriter.ExcelXlsxTableWriter()
|
|
|
- writer.open("index.xlsx")
|
|
|
- writer.make_worksheet("files")
|
|
|
- writer.headers = ["file"] + data["fileMetrixList"]
|
|
|
-
|
|
|
- matrix = [];
|
|
|
-
|
|
|
- for file in data["files"]:
|
|
|
- line = []
|
|
|
- line.append("=HYPERLINK(\"#{0}!A1\",\"{0}\")".format(os.path.basename(file["path"])))
|
|
|
- for metric in data["fileMetrixList"]:
|
|
|
- if metric in file["data"]:
|
|
|
- for value in file["data"][metric].values():
|
|
|
- line.append(value["total"])
|
|
|
- break
|
|
|
- else:
|
|
|
- line.append("---")
|
|
|
- matrix.append(line)
|
|
|
-
|
|
|
-
|
|
|
-
|
|
|
-
|
|
|
-
|
|
|
-
|
|
|
- writer.headers = ["file", "line", "name"] + data["regionMetrixList"]
|
|
|
-
|
|
|
- for file in data["files"]:
|
|
|
- writer.make_worksheet(os.path.basename(file["path"]))
|
|
|
- matrix = [];
|
|
|
- for region in file["regions"]:
|
|
|
- line = []
|
|
|
- line.append(file["path"])
|
|
|
- line.append(str(region["line_begin"]))
|
|
|
- line.append(region["name"])
|
|
|
- for metric in data["regionMetrixList"]:
|
|
|
- if metric in region["data"]:
|
|
|
- for value in region["data"][metric].values():
|
|
|
- line.append(str(value))
|
|
|
- break
|
|
|
- else:
|
|
|
- line.append("---")
|
|
|
- matrix.append(line)
|
|
|
-
|
|
|
- writer.table_name = file["path"]
|
|
|
- writer.value_matrix = matrix
|
|
|
- writer.write_table()
|
|
|
-
|
|
|
- writer.close()
|
|
|
-
|
|
|
- return exit_code
|
|
|
+ def run(self, args):
|
|
|
+ exit_code = 0
|
|
|
+
|
|
|
+ data = {}
|
|
|
+
|
|
|
+ loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
|
|
|
+ loader = self.get_plugin('mpp.dbf').get_loader()
|
|
|
+
|
|
|
+ paths = None
|
|
|
+ if len(args) == 0:
|
|
|
+ subdirs, paths = self.loadSubdirs(loader, ".", [], [])
|
|
|
+ else:
|
|
|
+ paths = args
|
|
|
+
|
|
|
+ for path in paths:
|
|
|
+ path = mpp.utils.preprocess_path(path)
|
|
|
+ fields = ["region"]
|
|
|
+ data[path] = {}
|
|
|
+ data[path]["matrix"] = [["-"]]
|
|
|
+
|
|
|
+ for file_data in loader.iterate_file_data(path):
|
|
|
+ file_data.load_regions()
|
|
|
+ for region in file_data.regions:
|
|
|
+ data[path]["matrix"].append([region.name])
|
|
|
+
|
|
|
+ aggregated_data = loader.load_aggregated_data(path)
|
|
|
+ file_data = loader.load_file_data(path)
|
|
|
+
|
|
|
+ for (i, namespace) in enumerate(aggregated_data.iterate_namespaces()):
|
|
|
+ for field in aggregated_data.iterate_fields(namespace):
|
|
|
+ fields.append(namespace + "." + str(field[0]))
|
|
|
+
|
|
|
+ selected_data = loader.load_selected_data(namespace,
|
|
|
+ fields=[field[0]],
|
|
|
+ path=path)
|
|
|
+
|
|
|
+
|
|
|
+ for select_data in selected_data:
|
|
|
+ if not select_data.get_region():
|
|
|
+ data[path]["matrix"][0].append(select_data.get_data(namespace, field[0]))
|
|
|
+ break
|
|
|
+ else:
|
|
|
+ data[path]["matrix"][0].append("-")
|
|
|
+
|
|
|
+
|
|
|
+ for region in data[path]["matrix"][1:]:
|
|
|
+ for select_data in selected_data:
|
|
|
+ if region == select_data.get_region():
|
|
|
+ data[path]["matrix"][select_data.get_region().get_id()].append(select_data.get_data(namespace, field[0]))
|
|
|
+ break
|
|
|
+ else:
|
|
|
+ region.append("-")
|
|
|
+
|
|
|
+
|
|
|
+ data[path]["fields"] = fields
|
|
|
+
|
|
|
+ if self.out_format == "doxygen":
|
|
|
+ exit_code = self.create_doxygen_report(paths,
|
|
|
+ self.out_dir,
|
|
|
+ data,
|
|
|
+ loader,
|
|
|
+ loader_prev)
|
|
|
+ else:
|
|
|
+ logging.error("unknown or no output format set")
|
|
|
+ exit_code = 1
|
|
|
+
|
|
|
+
|
|
|
+ return exit_code
|