limit.py

#
#    Metrix++, Copyright 2009-2013, Metrix++ Project
#    Link: http://metrixplusplus.sourceforge.net
#
#    This file is a part of Metrix++ Tool.
#
#    Metrix++ is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, version 3 of the License.
#
#    Metrix++ is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with Metrix++. If not, see <http://www.gnu.org/licenses/>.
#
import logging

import mpp.api
import mpp.utils
import mpp.cout
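
# Reader note (inferred from how these modules are used below, not from their docs):
# - mpp.api provides the Plugin/IConfigurable/IRunable base classes and DiffData
# - mpp.utils provides preprocess_path, report_bad_path and FileRegionsMatcher
# - mpp.cout provides notify() and the SEVERITY_* constants used for reporting
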
class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):

    def __init__(self):
        pass

    def declare_configuration(self, parser):
        parser.add_option("--hotspots", "--hs", default=None, help="If not set (none), all exceeded limits are printed."
                          " If set, exceeded limits are sorted (the worst is the first) and only first HOTSPOTS limits are printed."
                          " [default: %default]", type=int)
        parser.add_option("--disable-suppressions", "--ds", action="store_true", default=False,
                          help="If not set (none), all suppressions are ignored"
                          " and associated warnings are printed. [default: %default]")

    def configure(self, options):
        self.hotspots = options.__dict__['hotspots']
        self.no_suppress = options.__dict__['disable_suppressions']

    def run(self, args):
        return main(self, args)
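
# Reader note on the Plugin class above: it only declares the '--hotspots' and
# '--disable-suppressions' options and stores their values; all limit checking
# is delegated to main() below, which run() forwards to (run() is presumably
# invoked by the Metrix++ tool driver through the IRunable interface).
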
def main(plugin, args):
    exit_code = 0

    loader_prev = plugin.get_plugin_loader().get_plugin('mpp.dbf').get_loader_prev()
    loader = plugin.get_plugin_loader().get_plugin('mpp.dbf').get_loader()
    warn_plugin = plugin.get_plugin_loader().get_plugin('mpp.warn')

    paths = None
    if len(args) == 0:
        paths = [""]
    else:
        paths = args

    # Try to optimise iterative change scans
    modified_file_ids = None
    if warn_plugin.mode != warn_plugin.MODE_ALL:
        modified_file_ids = get_list_of_modified_files(loader, loader_prev)

    for path in paths:
        path = mpp.utils.preprocess_path(path)

        for limit in warn_plugin.iterate_limits():
            logging.info("Applying limit: " + str(limit))
            filters = [limit.filter]
            if modified_file_ids != None:
                filters.append(('file_id', 'IN', modified_file_ids))
            sort_by = None
            limit_by = None
            if plugin.hotspots != None:
                sort_by = limit.field
                if limit.type == "max":
                    sort_by = "-" + sort_by
                limit_by = plugin.hotspots
            selected_data = loader.load_selected_data(limit.namespace,
                                                      fields=[limit.field],
                                                      path=path,
                                                      filters=filters,
                                                      sort_by=sort_by,
                                                      limit_by=limit_by)
            if selected_data == None:
                mpp.utils.report_bad_path(path)
                exit_code += 1
                continue

            for select_data in selected_data:
                is_modified = None
                diff = None
                file_data = loader.load_file_data(select_data.get_path())
                file_data_prev = loader_prev.load_file_data(select_data.get_path())
                if file_data_prev != None:
                    if file_data.get_checksum() == file_data_prev.get_checksum():
                        diff = 0
                        is_modified = False
                    else:
                        matcher = mpp.utils.FileRegionsMatcher(file_data, file_data_prev)
                        prev_id = matcher.get_prev_id(select_data.get_region().get_id())
                        if matcher.is_matched(select_data.get_region().get_id()):
                            if matcher.is_modified(select_data.get_region().get_id()):
                                is_modified = True
                            else:
                                is_modified = False
                            diff = mpp.api.DiffData(select_data,
                                                    file_data_prev.get_region(prev_id)).get_data(limit.namespace, limit.field)

                if (warn_plugin.is_mode_matched(limit.limit,
                                                select_data.get_data(limit.namespace, limit.field),
                                                diff,
                                                is_modified) == False):
                    continue

                is_sup = is_metric_suppressed(limit.namespace, limit.field, loader, select_data)
                if is_sup == True and plugin.no_suppress == False:
                    continue

                exit_code += 1
                region_cursor = 0
                region_name = None
                if select_data.get_region() != None:
                    region_cursor = select_data.get_region().cursor
                    region_name = select_data.get_region().name
                report_limit_exceeded(select_data.get_path(),
                                      region_cursor,
                                      limit.namespace,
                                      limit.field,
                                      region_name,
                                      select_data.get_data(limit.namespace, limit.field),
                                      diff,
                                      limit.limit,
                                      is_modified,
                                      is_sup)

    return exit_code
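
# Reader note on main() above: it returns the number of reported problems (bad
# paths plus exceeded limits), so a non-zero result signals at least one
# violation. For each selected record the flow is: compute the diff against the
# previous snapshot (if any), skip records that do not match the warning mode,
# skip suppressed metrics unless '--disable-suppressions' is set, then report.
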
def get_list_of_modified_files(loader, loader_prev):
    logging.info("Identifying changed files...")

    old_files_map = {}
    for each in loader_prev.iterate_file_data():
        old_files_map[each.get_path()] = each.get_checksum()
    if len(old_files_map) == 0:
        return None

    modified_file_ids = []
    for each in loader.iterate_file_data():
        if len(modified_file_ids) > 1000: # If more than 1000 files changed, skip optimisation
            return None
        if (each.get_path() not in old_files_map.keys()) or old_files_map[each.get_path()] != each.get_checksum():
            modified_file_ids.append(str(each.get_id()))
    old_files_map = None

    if len(modified_file_ids) != 0:
        modified_file_ids = " , ".join(modified_file_ids)
        modified_file_ids = "(" + modified_file_ids + ")"
        return modified_file_ids

    return None
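
# Reader note on get_list_of_modified_files() above: on the iterative-scan path
# it returns the changed file ids pre-formatted as a string like "(3 , 7 , 12)"
# (values illustrative), which main() passes straight into a
# ('file_id', 'IN', ...) filter for the loader. It returns None when there is
# no previous snapshot or when more than 1000 files changed, in which case the
# optimisation is skipped and all files are scanned.
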
def is_metric_suppressed(metric_namespace, metric_field, loader, select_data):
    data = loader.load_file_data(select_data.get_path())
    if select_data.get_region() != None:
        data = data.get_region(select_data.get_region().get_id())
        sup_data = data.get_data('std.suppress', 'list')
    else:
        sup_data = data.get_data('std.suppress.file', 'list')
    if sup_data != None and sup_data.find('[' + metric_namespace + ':' + metric_field + ']') != -1:
        return True
    return False
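
# Reader note on is_metric_suppressed() above: suppressions are matched
# textually, i.e. the stored 'std.suppress' / 'std.suppress.file' list is
# searched for the substring '[<namespace>:<field>]', for example
# '[std.code.complexity:cyclomatic]' (example metric name is illustrative).
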
def report_limit_exceeded(path, cursor, namespace, field, region_name,
                          stat_level, trend_value, stat_limit,
                          is_modified, is_suppressed):
    if region_name != None:
        message = "Metric '" + namespace + ":" + field + "' for region '" + region_name + "' exceeds the limit."
    else:
        message = "Metric '" + namespace + ":" + field + "' exceeds the limit."
    details = [("Metric name", namespace + ":" + field),
               ("Region name", region_name),
               ("Metric value", stat_level),
               ("Modified", is_modified),
               ("Change trend", '{0:{1}}'.format(trend_value, '+' if trend_value else '')),
               ("Limit", stat_limit),
               ("Suppressed", is_suppressed)]
    mpp.cout.notify(path, cursor, mpp.cout.SEVERITY_WARNING, message, details)
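
# Illustrative sketch (not part of the original file; all values are made up):
# a direct call such as
#
#   report_limit_exceeded("src/foo.cpp", 120, "std.code.complexity", "cyclomatic",
#                         "parse_input", 15, 3, 10.0, True, False)
#
# builds the message "Metric 'std.code.complexity:cyclomatic' for region
# 'parse_input' exceeds the limit." and passes it to mpp.cout.notify with
# SEVERITY_WARNING, together with details listing the metric name, region,
# value, change trend (formatted as '+3'), limit and suppression status.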