# report.py (scraped copy, original ~12 KB / 290 lines; trailing lines may be missing)
  1. #
  2. # Metrix++, Copyright 2009-2024, Metrix++ Project
  3. # Link: https://github.com/metrixplusplus/metrixplusplus
  4. #
  5. # This file is a part of Metrix++ Tool.
  6. #
  7. import logging
  8. import os
  9. import pytablewriter
  10. from metrixpp.mpp import api
  11. from metrixpp.mpp import utils
# Number of digits used when formatting metric values.
# NOTE(review): not referenced anywhere in this view — presumably used by code
# outside this chunk or kept for consistency with sibling plugins; confirm.
DIGIT_COUNT = 8
  13. class Plugin(api.Plugin, api.IConfigurable, api.IRunable):
  14. def declare_configuration(self, parser):
  15. self.parser = parser
  16. parser.add_option("--output-dir", "--od", default='./metrixpp/',
  17. help="Set the output folder. [default: %default].")
  18. parser.add_option("--format", "--ft", default='txt', choices=['txt', 'doxygen'],
  19. help="Format of the output data. "
  20. "Possible values are 'txt' or 'doxygen' [default: %default]")
  21. def configure(self, options):
  22. self.out_dir = options.__dict__['output_dir']
  23. self.out_format = options.__dict__['format']
  24. def initialize(self):
  25. super(Plugin, self).initialize()
  26. def loadSubdirs(self, loader, path, subdirs, subfiles):
  27. aggregated_data = loader.load_aggregated_data(path)
  28. if not aggregated_data:
  29. return subdirs, subfiles
  30. for subfile in aggregated_data.get_subfiles():
  31. subfiles.append(aggregated_data.path + "/" + subfile)
  32. for subdir in aggregated_data.get_subdirs():
  33. subdir = aggregated_data.path + "/" + subdir
  34. subdirs.append(subdir)
  35. # recurse for all subdirs and subfiles
  36. subdirs, subfiles = self.loadSubdirs(loader, subdir, subdirs, subfiles)
  37. return subdirs, subfiles
  38. @staticmethod
  39. def _get_warning_text(warning):
  40. warning_text = "Metric '" + warning.namespace + ":" + warning.field + "'"
  41. if warning.region_name and warning.region_name != "__global__":
  42. warning_text = warning_text + " for region \\ref " + warning.region_name
  43. elif warning.region_name == "__global__":
  44. warning_text = warning_text + " for region " + warning.region_name
  45. else:
  46. warning_text = warning_text + " for the file \\ref " + warning.path
  47. warning_text = warning_text + " exceeds the limit."
  48. if warning.type == "max":
  49. warning_comp = ">"
  50. else:
  51. warning_comp = "<"
  52. warning_text = warning_text + " (value: {} {} limit: {})".format(warning.stat_level,
  53. warning_comp,
  54. warning.stat_limit)
  55. return warning_text
  56. def _get_txt_warnings(self, warnings):
  57. warning_text = ""
  58. for warning in warnings:
  59. warning_text += self._get_warning_text(warning) + "\n"
  60. return warning_text
  61. def create_txt_report(self, paths, overview_data, data):
  62. report_text = "Overview:\n"
  63. # start with overview data
  64. for row in overview_data["matrix"]:
  65. report_text += "\n"
  66. for idx, field in enumerate(overview_data["fields"]):
  67. report_text += field + ": " + str(row[idx]) + "\n"
  68. if len(overview_data["warnings"]) > 0:
  69. report_text += "\nWarnings:\n"
  70. report_text += self._get_txt_warnings(overview_data["warnings"])
  71. # add file based data
  72. report_text += "\nFiles:\n"
  73. for path in paths:
  74. report_text += "\n" + path + "\n"
  75. for row in data[path]["file_matrix"]:
  76. for idx, field in enumerate(data[path]["file_fields"]):
  77. report_text += field + ": " + str(row[idx]) + "\n"
  78. for row in data[path]["region_matrix"]:
  79. report_text += "\n" + path + " "
  80. for idx, field in enumerate(data[path]["region_fields"]):
  81. report_text += field + ": " + str(row[idx]) + "\n"
  82. if data[path]["warnings"]:
  83. report_text += "\nWarnings for " + path + ":\n"
  84. report_text += self._get_txt_warnings(data[path]["warnings"])
  85. return report_text
  86. def create_doxygen_report(self, paths, output_dir, overview_data, data):
  87. exit_code = 1
  88. if output_dir:
  89. os.makedirs(output_dir, exist_ok=True)
  90. with open(os.path.join(output_dir, "metrixpp.dox"), mode="w+") as file:
  91. file.write("/* this file is autogenerated by metrix++ - changes will be overwritten */\n")
  92. file.write("/*!\n")
  93. file.write("\\page metrix_overview Metrix overview\n\n")
  94. file.write("\\section metrix_sec Metrix Warnings\n\n")
  95. file.write("Metrix Limits exceeded {} times.\n\n".format(len(overview_data["warnings"])))
  96. if len(overview_data["warnings"]) > 0:
  97. file.write("Warning list: \\ref metrix_warnings\n\n")
  98. for file_data in overview_data["matrix"]:
  99. file_data[0] = str(file_data[0]).replace("\\", "/")
  100. writer = pytablewriter.MarkdownTableWriter()
  101. writer.table_name = "metrix overview"
  102. writer.headers = overview_data["fields"]
  103. writer.value_matrix = overview_data["matrix"]
  104. writer.margin = 1
  105. writer.stream = file
  106. writer.write_table()
  107. file.write("\n\n")
  108. for path in paths:
  109. file.write("\\file {}\n\n".format(path))
  110. writer = pytablewriter.MarkdownTableWriter()
  111. writer.table_name = "metrix"
  112. writer.headers = data[path]["file_fields"]
  113. writer.value_matrix = data[path]["file_matrix"]
  114. writer.margin = 1
  115. writer.stream = file
  116. writer.write_table()
  117. file.write("\n")
  118. for region in data[path]["region_matrix"]:
  119. if region[0] != "-" and region[0] != "__global__":
  120. region[0] = "\\ref " + region[0]
  121. writer = pytablewriter.MarkdownTableWriter()
  122. writer.table_name = "region metrix"
  123. writer.headers = data[path]["region_fields"]
  124. writer.value_matrix = data[path]["region_matrix"]
  125. writer.margin = 1
  126. writer.stream = file
  127. writer.write_table()
  128. file.write("\n")
  129. # add warnings as list items
  130. for warning in data[path]["warnings"]:
  131. warning_text = self._get_warning_text(warning)
  132. file.write("\\xrefitem metrix_warnings \"Metrix Warning\" \"Metrix Warnings\" {}\n".format(warning_text))
  133. file.write("\n\n")
  134. file.write("*/\n")
  135. exit_code = 0
  136. else:
  137. logging.error("no output directory set")
  138. return exit_code
  139. def run(self, args):
  140. exit_code = 0
  141. data = {}
  142. overview_data = {}
  143. loader = self.get_plugin('metrixpp.mpp.dbf').get_loader()
  144. limit_backend = self.get_plugin('std.tools.limit_backend')
  145. paths = None
  146. if len(args) == 0:
  147. subdirs, paths = self.loadSubdirs(loader, ".", [], [])
  148. else:
  149. paths = args
  150. for path in paths:
  151. path = utils.preprocess_path(path)
  152. data[path] = {}
  153. data[path]["file_data"] = {}
  154. data[path]["file_fields"] = ["warnings"]
  155. data[path]["file_matrix"] = [[]]
  156. data[path]["regions"] = {}
  157. data[path]["region_fields"] = ["region", "warnings"]
  158. data[path]["region_matrix"] = []
  159. data[path]["warnings"] = []
  160. file_data = loader.load_file_data(path)
  161. # get warnings from limit plugin
  162. data[path]["warnings"] = limit_backend.get_all_warnings(path)
  163. # convert paths to increase readability
  164. for warning in data[path]["warnings"]:
  165. warning.path = os.path.relpath(warning.path)
  166. # load file based data
  167. data_tree = file_data.get_data_tree()
  168. for namespace in file_data.iterate_namespaces():
  169. for field in file_data.iterate_fields(namespace):
  170. data[path]["file_data"][namespace + "." + field[0]] = field[1]
  171. data[path]["file_fields"].append(namespace + "." + field[0])
  172. for field in data[path]["file_fields"]:
  173. if field == "warnings":
  174. data[path]["file_matrix"][0].append(len(data[path]["warnings"]))
  175. else:
  176. data[path]["file_matrix"][0].append(data[path]["file_data"][field])
  177. # load region based data
  178. file_data.load_regions()
  179. for region in file_data.regions:
  180. data[path]["regions"][region.name] = {}
  181. data_tree = region.get_data_tree()
  182. for namespace in region.iterate_namespaces():
  183. for field in region.iterate_fields(namespace):
  184. data[path]["regions"][region.name][namespace + "." + field[0]] = field[1]
  185. if not (namespace + "." + field[0]) in data[path]["region_fields"]:
  186. data[path]["region_fields"].append(namespace + "." + field[0])
  187. # iterate over all found regions in the file
  188. for region in data[path]["regions"]:
  189. # add static columns with region name and warning count
  190. warning_count = sum(warning.region_name == region for warning in data[path]["warnings"])
  191. region_row = [region, str(warning_count)]
  192. # start iterating after the static fields
  193. for field in data[path]["region_fields"][2:]:
  194. if field in data[path]["regions"][region]:
  195. region_row.append(data[path]["regions"][region][field])
  196. else:
  197. region_row.append("-")
  198. data[path]["region_matrix"].append(region_row)
  199. # assemble overview table
  200. overview_data["warnings"] = []
  201. overview_data["fields"] = ["file", "warnings"]
  202. overview_data["matrix"] = []
  203. for key, value in data.items():
  204. for field in value["file_fields"]:
  205. if not field in overview_data["fields"]:
  206. overview_data["fields"].append(field)
  207. for key, value in data.items():
  208. overview_data["warnings"] = overview_data["warnings"] + value["warnings"]
  209. row = [os.path.relpath(key), len(value["warnings"])]
  210. for field in overview_data["fields"][2:]:
  211. if field in value["file_data"]:
  212. row.append(value["file_data"][field])
  213. else:
  214. row.append("-")
  215. overview_data["matrix"].append(row)
  216. if self.out_format == "txt":
  217. result_text = self.create_txt_report(paths,
  218. overview_data,
  219. data)
  220. filename = "metrixpp.txt"
  221. elif self.out_format == "doxygen":
  222. exit_code = self.create_doxygen_report(paths,
  223. self.out_dir,
  224. overview_data,
  225. data)
  226. else:
  227. logging.error("unknown or no output format set")
  228. exit_code = 1
  229. return exit_code