view.py
#
# Metrix++, Copyright 2009-2013, Metrix++ Project
# Link: http://metrixplusplus.sourceforge.net
#
# This file is a part of Metrix++ Tool.
#
# Metrix++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# Metrix++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Metrix++. If not, see <http://www.gnu.org/licenses/>.
#

import mpp.api
import mpp.utils
import mpp.cout

class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):

    def declare_configuration(self, parser):
        parser.add_option("--format", "--ft", default='txt', choices=['txt', 'xml', 'python'],
                          help="Format of the output data. "
                          "Possible values are 'xml', 'txt' or 'python' [default: %default]")
        parser.add_option("--nest-regions", "--nr", action="store_true", default=False,
                          help="If the option is set (True), data for regions is exported in the form of a tree. "
                          "Otherwise, all regions are exported in a plain list. [default: %default]")

    def configure(self, options):
        self.out_format = options.__dict__['format']
        self.nest_regions = options.__dict__['nest_regions']

    def run(self, args):
        loader_prev = self.get_plugin_loader().get_plugin('mpp.dbf').get_loader_prev()
        loader = self.get_plugin_loader().get_plugin('mpp.dbf').get_loader()

        paths = None
        if len(args) == 0:
            paths = [""]
        else:
            paths = args

        (result, exit_code) = export_to_str(self.out_format, paths, loader, loader_prev, self.nest_regions)
        print result
        return exit_code
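
# Illustrative usage sketch (an assumption based on the options declared above,
# not a command documented in this file): the plugin is normally driven through
# the Metrix++ launcher as the 'view' action, for example:
#     python metrix++.py view --format=xml --nest-regions -- path/to/code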

def export_to_str(out_format, paths, loader, loader_prev, nest_regions):
    # Collects aggregated and per-file data for every requested path and
    # serializes it: 'txt' output is printed directly via mpp.cout, while
    # 'xml' and 'python' output is accumulated into the returned string.
    exit_code = 0
    result = ""
    if out_format == 'xml':
        result += "<export>\n"
    elif out_format == 'python':
        result += "{'export': ["

    for (ind, path) in enumerate(paths):
        path = mpp.utils.preprocess_path(path)

        aggregated_data = loader.load_aggregated_data(path)
        aggregated_data_tree = {}
        subdirs = []
        subfiles = []
        if aggregated_data != None:
            aggregated_data_tree = aggregated_data.get_data_tree()
            subdirs = aggregated_data.get_subdirs()
            subfiles = aggregated_data.get_subfiles()
        else:
            mpp.utils.report_bad_path(path)
            exit_code += 1
        aggregated_data_prev = loader_prev.load_aggregated_data(path)
        if aggregated_data_prev != None:
            aggregated_data_tree = append_diff(aggregated_data_tree,
                                               aggregated_data_prev.get_data_tree())

        file_data = loader.load_file_data(path)
        file_data_tree = {}
        if file_data != None:
            file_data_tree = file_data.get_data_tree()
            file_data_prev = loader_prev.load_file_data(path)
            append_regions(file_data_tree, file_data, file_data_prev, nest_regions)

        data = {"info": {"path": path, "id": ind + 1},
                "aggregated-data": aggregated_data_tree,
                "file-data": file_data_tree,
                "subdirs": subdirs,
                "subfiles": subfiles}

        if out_format == 'txt':
            cout_txt(data)
        elif out_format == 'xml':
            result += mpp.utils.serialize_to_xml(data, root_name = "data") + "\n"
        elif out_format == 'python':
            postfix = ""
            if ind < len(paths) - 1:
                postfix = ", "
            result += mpp.utils.serialize_to_python(data, root_name = "data") + postfix

    if out_format == 'xml':
        result += "</export>"
    elif out_format == 'python':
        result += "]}"

    return (result, exit_code)

def append_regions(file_data_tree, file_data, file_data_prev, nest_regions):
    # Attaches region data to the file data tree, either as a flat list or,
    # when nest_regions is set, as a tree of nested subregions; diffs against
    # the previous snapshot are merged in for every matched region.
    regions_matcher = None
    if file_data_prev != None:
        file_data_tree = append_diff(file_data_tree,
                                     file_data_prev.get_data_tree())
        regions_matcher = mpp.utils.FileRegionsMatcher(file_data, file_data_prev)

    if nest_regions == False:
        regions = []
        for region in file_data.iterate_regions():
            region_data_tree = region.get_data_tree()
            if regions_matcher != None and regions_matcher.is_matched(region.get_id()):
                region_data_prev = file_data_prev.get_region(regions_matcher.get_prev_id(region.get_id()))
                region_data_tree = append_diff(region_data_tree,
                                               region_data_prev.get_data_tree())
            regions.append({"info": {"name": region.name,
                                     'type': file_data.get_region_types()().to_str(region.get_type()),
                                     "cursor": region.cursor,
                                     'line_begin': region.line_begin,
                                     'line_end': region.line_end,
                                     'offset_begin': region.begin,
                                     'offset_end': region.end},
                            "data": region_data_tree})
        file_data_tree['regions'] = regions
    else:
        def append_rec(region_id, file_data_tree, file_data, file_data_prev):
            region = file_data.get_region(region_id)
            region_data_tree = region.get_data_tree()
            if regions_matcher != None and regions_matcher.is_matched(region.get_id()):
                region_data_prev = file_data_prev.get_region(regions_matcher.get_prev_id(region.get_id()))
                region_data_tree = append_diff(region_data_tree,
                                               region_data_prev.get_data_tree())
            result = {"info": {"name": region.name,
                               'type': file_data.get_region_types()().to_str(region.get_type()),
                               "cursor": region.cursor,
                               'line_begin': region.line_begin,
                               'line_end': region.line_end,
                               'offset_begin': region.begin,
                               'offset_end': region.end},
                      "data": region_data_tree,
                      "subregions": []}
            for sub_id in file_data.get_region(region_id).iterate_subregion_ids():
                result['subregions'].append(append_rec(sub_id, file_data_tree, file_data, file_data_prev))
            return result
        file_data_tree['regions'] = []
        file_data_tree['regions'].append(append_rec(1, file_data_tree, file_data, file_data_prev))

def append_diff(main_tree, prev_tree):
    assert(main_tree != None)
    assert(prev_tree != None)

    for name in main_tree.keys():
        if name not in prev_tree.keys():
            continue
        for field in main_tree[name].keys():
            if field not in prev_tree[name].keys():
                continue
            if isinstance(main_tree[name][field], dict) and isinstance(prev_tree[name][field], dict):
                diff = {}
                for key in main_tree[name][field].keys():
                    if key not in prev_tree[name][field].keys():
                        continue
                    main_val = main_tree[name][field][key]
                    prev_val = prev_tree[name][field][key]
                    if main_val == None:
                        main_val = 0
                    if prev_val == None:
                        prev_val = 0
                    if isinstance(main_val, list) and isinstance(prev_val, list):
                        main_tree[name][field][key] = append_diff_list(main_val, prev_val)
                    else:
                        diff[key] = main_val - prev_val
                main_tree[name][field]['__diff__'] = diff
            elif (not isinstance(main_tree[name][field], dict)) and (not isinstance(prev_tree[name][field], dict)):
                if '__diff__' not in main_tree[name]:
                    main_tree[name]['__diff__'] = {}
                main_tree[name]['__diff__'][field] = main_tree[name][field] - prev_tree[name][field]
    return main_tree
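
# Illustrative sketch of append_diff() behaviour, using hypothetical metric
# trees (the namespace and field names below are only examples):
#     main_tree = {'std.code.lines': {'total': 120}}
#     prev_tree = {'std.code.lines': {'total': 100}}
# append_diff(main_tree, prev_tree) keeps the current values and records the
# delta, producing:
#     {'std.code.lines': {'total': 120, '__diff__': {'total': 20}}}
# For dict-valued fields the per-key deltas go into a nested '__diff__' entry,
# and list-valued entries inside those dicts (distribution bars) are merged
# via append_diff_list() below.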

def append_diff_list(main_list, prev_list):
    merged_list = {}
    for bar in main_list:
        merged_list[bar['metric']] = {'count': bar['count'], '__diff__': 0, 'ratio': bar['ratio']}
    for bar in prev_list:
        if bar['metric'] in merged_list.keys():
            merged_list[bar['metric']]['__diff__'] = \
                merged_list[bar['metric']]['count'] - bar['count']
        else:
            merged_list[bar['metric']] = {'count': 0, '__diff__': -bar['count'], 'ratio': 0}
    result = []
    for metric in sorted(merged_list.keys()):
        result.append({'metric': metric,
                       'count': merged_list[metric]['count'],
                       'ratio': merged_list[metric]['ratio'],
                       '__diff__': merged_list[metric]['__diff__']})
    return result
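
# Illustrative sketch of append_diff_list(), assuming bars shaped like the
# aggregated 'distribution-bars' entries (the values below are hypothetical):
#     main = [{'metric': 1, 'count': 4, 'ratio': 0.8}]
#     prev = [{'metric': 1, 'count': 3, 'ratio': 0.75},
#             {'metric': 2, 'count': 1, 'ratio': 0.25}]
# append_diff_list(main, prev) returns, sorted by metric value:
#     [{'metric': 1, 'count': 4, 'ratio': 0.8, '__diff__': 1},
#      {'metric': 2, 'count': 0, 'ratio': 0, '__diff__': -1}]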

def cout_txt_regions(path, regions, indent = 0):
    # Prints per-region metrics in text form, recursing into nested
    # subregions with an increased indentation level.
    for region in regions:
        details = [
            ('Region name', region['info']['name']),
            ('Region type', region['info']['type']),
            ('Offsets', str(region['info']['offset_begin']) + "-" + str(region['info']['offset_end'])),
            ('Line numbers', str(region['info']['line_begin']) + "-" + str(region['info']['line_end']))
        ]
        for namespace in region['data'].keys():
            diff_data = {}
            if '__diff__' in region['data'][namespace].keys():
                diff_data = region['data'][namespace]['__diff__']
            for field in region['data'][namespace].keys():
                diff_str = ""
                if field == '__diff__':
                    continue
                if field in diff_data.keys():
                    diff_str = " [" + ("+" if diff_data[field] >= 0 else "") + str(diff_data[field]) + "]"
                details.append((namespace + ":" + field, str(region['data'][namespace][field]) + diff_str))
        mpp.cout.notify(path,
                        region['info']['cursor'],
                        mpp.cout.SEVERITY_INFO,
                        "Metrics per '" + region['info']['name'] + "' region",
                        details,
                        indent=indent)
        if 'subregions' in region.keys():
            cout_txt_regions(path, region['subregions'], indent=indent+1)

def cout_txt(data):
    # Prints per-file metrics, overall (aggregated) metrics with their
    # distribution bars, and the directory content listing in text form.
    details = []
    for key in data['file-data'].keys():
        if key == 'regions':
            cout_txt_regions(data['info']['path'], data['file-data'][key])
        else:
            namespace = key
            diff_data = {}
            if '__diff__' in data['file-data'][namespace].keys():
                diff_data = data['file-data'][namespace]['__diff__']
            for field in data['file-data'][namespace].keys():
                diff_str = ""
                if field == '__diff__':
                    continue
                if field in diff_data.keys():
                    diff_str = " [" + ("+" if diff_data[field] >= 0 else "") + str(diff_data[field]) + "]"
                details.append((namespace + ":" + field, str(data['file-data'][namespace][field]) + diff_str))
    if len(details) > 0:
        mpp.cout.notify(data['info']['path'],
                        0,
                        mpp.cout.SEVERITY_INFO,
                        "Metrics per file",
                        details)

    attr_map = {'count': 'Measured',
                'total': 'Total',
                'avg': 'Average',
                'min': 'Minimum',
                'max': 'Maximum'}
    for namespace in data['aggregated-data'].keys():
        for field in data['aggregated-data'][namespace].keys():
            details = []
            diff_data = {}
            if '__diff__' in data['aggregated-data'][namespace][field].keys():
                diff_data = data['aggregated-data'][namespace][field]['__diff__']
            for attr in data['aggregated-data'][namespace][field].keys():
                diff_str = ""
                if attr == 'distribution-bars' or attr == '__diff__' or attr == 'count':
                    continue
                if attr in diff_data.keys():
                    diff_str = " [" + ("+" if diff_data[attr] >= 0 else "") + str(diff_data[attr]) + "]"
                details.append((attr_map[attr], str(data['aggregated-data'][namespace][field][attr]) + diff_str))

            measured = data['aggregated-data'][namespace][field]['count']
            diff_str = ""  # reset so a value left over from the attribute loop above is not reused
            if 'count' in diff_data.keys():
                diff_str = ' [{0:{1}}]'.format(diff_data['count'], '+' if diff_data['count'] >= 0 else '')
            count_str_len = len(str(measured))
            details.append(('Distribution', str(measured) + diff_str + ' files/regions measured'))
            details.append((' Metric value', 'Ratio : Number of files/regions'))
            for bar in data['aggregated-data'][namespace][field]['distribution-bars']:
                diff_str = ""
                if '__diff__' in bar.keys():
                    diff_str = ' [{0:{1}}]'.format(bar['__diff__'], '+' if bar['__diff__'] >= 0 else '')
                if isinstance(bar['metric'], float):
                    metric_str = "{0:.4f}".format(bar['metric'])
                else:
                    metric_str = str(bar['metric'])
                metric_str = (" " * (mpp.cout.DETAILS_OFFSET - len(metric_str) - 1)) + metric_str
                count_str = str(bar['count'])
                count_str = ((" " * (count_str_len - len(count_str))) + count_str + diff_str + "\t")
                details.append((metric_str,
                                "{0:.3f}".format(bar['ratio']) + " : " + count_str + ('|' * int(round(bar['ratio'] * 100)))))
            mpp.cout.notify(data['info']['path'],
                            '',  # no line number for overall statistics
                            mpp.cout.SEVERITY_INFO,
                            "Overall metrics for '" + namespace + ":" + field + "' metric",
                            details)

    details = []
    for each in data['subdirs']:
        details.append(('Directory', each))
    for each in data['subfiles']:
        details.append(('File', each))
    if len(details) > 0:
        mpp.cout.notify(data['info']['path'],
                        '',  # no line number for directory content
                        mpp.cout.SEVERITY_INFO,
                        "Directory content:",
                        details)