report.py 14 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357
  1. #
  2. # Metrix++, Copyright 2009-2024, Metrix++ Project
  3. # Link: https://github.com/metrixplusplus/metrixplusplus
  4. #
  5. # This file is a part of Metrix++ Tool.
  6. #
  7. import logging
  8. import io
  9. import os
  10. import json
  11. from metrixpp.mpp import api
  12. from metrixpp.mpp import utils
# NOTE(review): not referenced anywhere in this file chunk — presumably used by
# sibling modules or kept for consistency with other report plugins; confirm
# external use before removing.
DIGIT_COUNT = 8
class Plugin(api.Plugin, api.IConfigurable, api.IRunable):
    """Report generator plugin for Metrix++.

    Collects per-file and per-region metric data plus limit warnings from the
    database loader and the 'std.tools.limit_backend' plugin, then renders a
    report in one of three formats: plain text ('txt'), Doxygen markup
    ('doxygen'), or JSON ('json'). The result is written to
    ``--output-dir`` (metrixpp.txt / metrixpp.dox / metrixpp.json) or printed
    to stdout when no output directory is configured.
    """

    def declare_configuration(self, parser):
        """Register the command line options understood by this plugin.

        :param parser: option parser supplied by the framework.
        """
        # NOTE(review): self.parser is stored but not read anywhere in this
        # file — possibly kept for framework/debugging use; confirm.
        self.parser = parser
        parser.add_option("--output-dir", "--od", help="Set the output folder.")
        parser.add_option("--format", "--ft", default='txt', choices=['txt', 'doxygen', 'json'],
                          help="Format of the output data. "
                          "Possible values are 'txt', 'doxygen' or 'json' [default: %default]")

    def configure(self, options):
        """Cache the parsed option values for later use in run()."""
        self.out_dir = options.__dict__['output_dir']
        self.out_format = options.__dict__['format']

    def initialize(self):
        super(Plugin, self).initialize()

    def loadSubdirs(self, loader, path, subdirs, subfiles):
        """Recursively collect all sub-directories and files below *path*.

        Walks the aggregated data stored by the loader, appending each
        discovered entry to the *subdirs* / *subfiles* lists (which are
        mutated in place and also returned).

        :param loader: database loader providing load_aggregated_data().
        :param path: directory path to start from.
        :param subdirs: accumulator list for directory paths.
        :param subfiles: accumulator list for file paths.
        :returns: tuple (subdirs, subfiles).
        """
        aggregated_data = loader.load_aggregated_data(path)
        # No aggregated data means nothing is stored below this path.
        if not aggregated_data:
            return subdirs, subfiles
        for subfile in aggregated_data.get_subfiles():
            subfiles.append(aggregated_data.path + "/" + subfile)
        for subdir in aggregated_data.get_subdirs():
            subdir = aggregated_data.path + "/" + subdir
            subdirs.append(subdir)
            # recurse for all subdirs and subfiles
            subdirs, subfiles = self.loadSubdirs(loader, subdir, subdirs, subfiles)
        return subdirs, subfiles

    @staticmethod
    def _get_warning_text(warning, doxygen=False):
        """Build a one-line human-readable description of a limit warning.

        :param warning: warning object with namespace, field, region_name,
                        path, type, stat_level and stat_limit attributes.
        :param doxygen: when True, prefix region/file names with '\\ref' so
                        Doxygen turns them into cross references.
        :returns: formatted warning string.
        """
        warning_text = "Metric '" + warning.namespace + ":" + warning.field + "'"
        ref = "\\ref " if doxygen else ""
        if warning.region_name and warning.region_name != "__global__":
            warning_text = warning_text + " for region " + ref + warning.region_name
        elif warning.region_name == "__global__":
            # The global pseudo-region has no Doxygen anchor, so no \ref prefix.
            warning_text = warning_text + " for region " + warning.region_name
        else:
            warning_text = warning_text + " for the file " + ref + warning.path
        warning_text = warning_text + " exceeds the limit."
        # 'max' limits are exceeded from above, everything else from below.
        if warning.type == "max":
            warning_comp = ">"
        else:
            warning_comp = "<"
        warning_text = warning_text + " (value: {} {} limit: {})".format(warning.stat_level,
                                                                         warning_comp,
                                                                         warning.stat_limit)
        return warning_text

    @staticmethod
    def _get_warning_dict(warning):
        """Convert a warning object into a plain dict for JSON serialization."""
        warning_dict = {}
        warning_dict["metric"] = warning.namespace + ":" + warning.field
        warning_dict["region"] = warning.region_name
        warning_dict["type"] = warning.type
        warning_dict["level"] = warning.stat_level
        warning_dict["limit"] = warning.stat_limit
        return warning_dict

    def _get_txt_warnings(self, warnings):
        """Render *warnings* as plain text, one formatted warning per line."""
        warning_text = ""
        for warning in warnings:
            warning_text += self._get_warning_text(warning) + "\n"
        return warning_text

    def create_txt_report(self, paths, overview_data, data):
        """Render the plain-text report.

        :param paths: iterable of file paths to report on.
        :param overview_data: dict with 'fields', 'matrix' and 'warnings'
                              (assembled in run()).
        :param data: per-path dict with file/region matrices and warnings.
        :returns: complete report as a single string.
        """
        report_text = "Overview:\n"
        # start with overview data
        for row in overview_data["matrix"]:
            report_text += "\n"
            for idx, field in enumerate(overview_data["fields"]):
                report_text += field + ": " + str(row[idx]) + "\n"
        if len(overview_data["warnings"]) > 0:
            report_text += "\nWarnings:\n"
            report_text += self._get_txt_warnings(overview_data["warnings"])
        # add file based data
        report_text += "\nFiles:\n"
        for path in paths:
            report_text += "\n" + path + "\n"
            for row in data[path]["file_matrix"]:
                for idx, field in enumerate(data[path]["file_fields"]):
                    report_text += field + ": " + str(row[idx]) + "\n"
            for row in data[path]["region_matrix"]:
                report_text += "\n" + path + " "
                for idx, field in enumerate(data[path]["region_fields"]):
                    report_text += field + ": " + str(row[idx]) + "\n"
            if data[path]["warnings"]:
                report_text += "\nWarnings for " + path + ":\n"
                report_text += self._get_txt_warnings(data[path]["warnings"])
        return report_text

    def create_doxygen_report(self, paths, overview_data, data):
        """Render the report as a Doxygen page with markdown tables.

        Uses the third-party 'pytablewriter' package (imported lazily so the
        dependency is only required when this format is selected).

        :param paths: iterable of file paths to report on.
        :param overview_data: dict with 'fields', 'matrix' and 'warnings'.
        :param data: per-path dict with file/region matrices and warnings.
        :returns: Doxygen markup as a single string.
        """
        import pytablewriter
        result_text = ""
        result_text += "/* this file is autogenerated by metrix++ - changes will be overwritten */\n"
        result_text += "/*!\n"
        result_text += "\\page metrix_overview Metrix overview\n\n"
        result_text += "\\section metrix_sec Metrix Warnings\n\n"
        result_text += "Metrix Limits exceeded {} times.\n\n".format(len(overview_data["warnings"]))
        if len(overview_data["warnings"]) > 0:
            result_text += "Warning list: \\ref metrix_warnings\n\n"
        # Normalize path separators so Doxygen links work on Windows paths too.
        for file_data in overview_data["matrix"]:
            file_data[0] = str(file_data[0]).replace("\\", "/")
        writer = pytablewriter.MarkdownTableWriter()
        writer.table_name = "metrix overview"
        writer.headers = overview_data["fields"]
        writer.value_matrix = overview_data["matrix"]
        writer.margin = 1
        writer.stream = io.StringIO()
        writer.write_table()
        result_text += writer.stream.getvalue() + "\n\n"
        for path in paths:
            result_text += "\\file {}\n\n".format(path)
            writer = pytablewriter.MarkdownTableWriter()
            writer.table_name = "metrix"
            writer.headers = data[path]["file_fields"]
            writer.value_matrix = data[path]["file_matrix"]
            writer.margin = 1
            writer.stream = io.StringIO()
            writer.write_table()
            result_text += writer.stream.getvalue() + "\n"
            # Turn region names into Doxygen references, except placeholders.
            for region in data[path]["region_matrix"]:
                if region[0] != "-" and region[0] != "__global__":
                    region[0] = "\\ref " + region[0]
            writer = pytablewriter.MarkdownTableWriter()
            writer.table_name = "region metrix"
            writer.headers = data[path]["region_fields"]
            writer.value_matrix = data[path]["region_matrix"]
            writer.margin = 1
            writer.stream = io.StringIO()
            writer.write_table()
            result_text += writer.stream.getvalue() + "\n"
            # add warnings as list items
            for warning in data[path]["warnings"]:
                warning_text = self._get_warning_text(warning, doxygen=True)
                result_text += "\\xrefitem metrix_warnings \"Metrix Warning\" \"Metrix Warnings\" {}\n".format(warning_text)
            result_text += "\n\n"
        result_text += "*/\n"
        return result_text

    def create_json_report(self, paths, overview_data, data):
        """Render the report as pretty-printed JSON.

        :param paths: iterable of file paths to report on.
        :param overview_data: dict with 'fields', 'matrix' and 'warnings'.
        :param data: per-path dict with file/region matrices and warnings.
        :returns: JSON document string (indent=4).
        """
        report_dict = {}
        # start with overview data
        overview_list = []
        for row in overview_data["matrix"]:
            overview_dict = {}
            for idx, field in enumerate(overview_data["fields"]):
                overview_dict[field] = str(row[idx])
            overview_list.append(overview_dict)
        report_dict["overview"] = overview_list
        report_dict["warnings"] = []
        for warning in overview_data["warnings"]:
            report_dict["warnings"].append(self._get_warning_dict(warning))
        # add file based data
        files_dict = {}
        for path in paths:
            file_dict = {}
            regions_dict = {}
            warning_list = []
            for row in data[path]["file_matrix"]:
                for idx, field in enumerate(data[path]["file_fields"]):
                    file_dict[field] = str(row[idx])
            for row in data[path]["region_matrix"]:
                region_dict = {}
                for idx, field in enumerate(data[path]["region_fields"]):
                    region_dict[field] = str(row[idx])
                # row[0] is the region name column; fall back to a placeholder
                # key when it is empty/falsy.
                if row[0]:
                    regions_dict[row[0]] = region_dict
                else:
                    regions_dict["__no_region__"] = region_dict
            for warning in data[path]["warnings"]:
                warning_list.append(self._get_warning_dict(warning))
            file_dict["regions"] = regions_dict
            file_dict["warnings"] = warning_list
            files_dict[path] = file_dict
        report_dict["files"] = files_dict
        return json.dumps(report_dict, indent=4)

    def run(self, args):
        """Entry point: gather metric data and emit the configured report.

        :param args: list of paths to report on; when empty, all paths known
                     to the loader (starting at '.') are used.
        :returns: 0 on success, 1 when the output format is unknown.
        """
        # NOTE(review): exit_code is assigned but never used; the method
        # returns literal 0/1 below.
        exit_code = 0
        data = {}
        overview_data = {}
        loader = self.get_plugin('metrixpp.mpp.dbf').get_loader()
        limit_backend = self.get_plugin('std.tools.limit_backend')
        paths = None
        if len(args) == 0:
            # No explicit paths given: walk everything below the db root.
            subdirs, paths = self.loadSubdirs(loader, ".", [], [])
        else:
            paths = args
        for path in paths:
            # NOTE(review): 'data' is keyed by the preprocessed path while the
            # report generators below iterate the original 'paths' entries —
            # if utils.preprocess_path() ever changes the string, the lookups
            # in create_*_report would raise KeyError. Confirm the two always
            # agree.
            path = utils.preprocess_path(path)
            data[path] = {}
            data[path]["file_data"] = {}
            # 'warnings' / 'region' are static columns; metric fields are
            # appended behind them below.
            data[path]["file_fields"] = ["warnings"]
            data[path]["file_matrix"] = [[]]
            data[path]["regions"] = {}
            data[path]["region_fields"] = ["region", "warnings"]
            data[path]["region_matrix"] = []
            data[path]["warnings"] = []
            file_data = loader.load_file_data(path)
            # get warnings from limit plugin
            data[path]["warnings"] = limit_backend.get_all_warnings(path)
            # convert paths to increase readability
            for warning in data[path]["warnings"]:
                warning.path = os.path.relpath(warning.path)
            # load file based data
            # NOTE(review): data_tree is assigned but never read (here and in
            # the region loop below); presumably get_data_tree() is only kept
            # for its side effect, if any — confirm before removing.
            data_tree = file_data.get_data_tree()
            for namespace in file_data.iterate_namespaces():
                for field in file_data.iterate_fields(namespace):
                    data[path]["file_data"][namespace + "." + field[0]] = field[1]
                    data[path]["file_fields"].append(namespace + "." + field[0])
            for field in data[path]["file_fields"]:
                if field == "warnings":
                    data[path]["file_matrix"][0].append(len(data[path]["warnings"]))
                else:
                    data[path]["file_matrix"][0].append(data[path]["file_data"][field])
            # load region based data
            file_data.load_regions()
            for region in file_data.regions:
                data[path]["regions"][region.name] = {}
                data_tree = region.get_data_tree()
                for namespace in region.iterate_namespaces():
                    for field in region.iterate_fields(namespace):
                        data[path]["regions"][region.name][namespace + "." + field[0]] = field[1]
                        if not (namespace + "." + field[0]) in data[path]["region_fields"]:
                            data[path]["region_fields"].append(namespace + "." + field[0])
            # iterate over all found regions in the file
            for region in data[path]["regions"]:
                # add static columns with region name and warning count
                warning_count = sum(warning.region_name == region for warning in data[path]["warnings"])
                region_row = [region, str(warning_count)]
                # start iterating after the static fields
                for field in data[path]["region_fields"][2:]:
                    if field in data[path]["regions"][region]:
                        region_row.append(data[path]["regions"][region][field])
                    else:
                        # Metric not collected for this region.
                        region_row.append("-")
                data[path]["region_matrix"].append(region_row)
        # assemble overview table
        overview_data["warnings"] = []
        overview_data["fields"] = ["file", "warnings"]
        overview_data["matrix"] = []
        # First pass: union of all per-file metric fields defines the columns.
        for key, value in data.items():
            for field in value["file_fields"]:
                if not field in overview_data["fields"]:
                    overview_data["fields"].append(field)
        # Second pass: one overview row per file, '-' where a field is absent.
        for key, value in data.items():
            overview_data["warnings"] = overview_data["warnings"] + value["warnings"]
            row = [os.path.relpath(key), len(value["warnings"])]
            for field in overview_data["fields"][2:]:
                if field in value["file_data"]:
                    row.append(value["file_data"][field])
                else:
                    row.append("-")
            overview_data["matrix"].append(row)
        # Dispatch to the selected output format.
        if self.out_format == "txt":
            result_text = self.create_txt_report(paths,
                                                 overview_data,
                                                 data)
            filename = "metrixpp.txt"
        elif self.out_format == "doxygen":
            result_text = self.create_doxygen_report(paths,
                                                     overview_data,
                                                     data)
            filename = "metrixpp.dox"
        elif self.out_format == "json":
            result_text = self.create_json_report(paths,
                                                  overview_data,
                                                  data)
            filename = "metrixpp.json"
        else:
            logging.error("unknown or no output format set")
            return 1
        # Write to the output directory when configured, otherwise stdout.
        if self.out_dir:
            os.makedirs(self.out_dir, exist_ok=True)
            with open(os.path.join(self.out_dir, filename), "w+") as result_file:
                result_file.write(result_text)
        else:
            print(result_text)
        return 0