# limit.py
#
# Metrix++, Copyright 2009-2013, Metrix++ Project
# Link: http://metrixplusplus.sourceforge.net
#
# This file is a part of Metrix++ Tool.
#
# Metrix++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# Metrix++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Metrix++. If not, see <http://www.gnu.org/licenses/>.
#
  19. import logging
  20. import re
  21. import core.log
  22. import core.db.loader
  23. import core.db.post
  24. import core.db.utils
  25. import core.export.cout
  26. import core.warn
  27. import core.cmdparser
  28. import core.api
  29. class Tool(core.api.ITool):
  30. def run(self, tool_args):
  31. return main(tool_args)
  32. def main(tool_args):
  33. exit_code = 0
  34. log_plugin = core.log.Plugin()
  35. db_plugin = core.db.post.Plugin()
  36. warn_plugin = core.warn.Plugin()
  37. parser = core.cmdparser.MultiOptionParser(usage="Usage: %prog limit [options] -- [path 1] ... [path N]")
  38. log_plugin.declare_configuration(parser)
  39. db_plugin.declare_configuration(parser)
  40. warn_plugin.declare_configuration(parser)
  41. parser.add_option("--general.hotspots", default=None, help="If not set (none), all exceeded limits are printed."
  42. " If set, exceeded limits are sorted (the worst is the first) and only first GENERAL.HOTSPOTS limits are printed."
  43. " [default: %default]", type=int)
  44. (options, args) = parser.parse_args(tool_args)
  45. log_plugin.configure(options)
  46. db_plugin.configure(options)
  47. warn_plugin.configure(options)
  48. hotspots = options.__dict__['general.hotspots']
  49. loader_prev = core.db.loader.Loader()
  50. if db_plugin.dbfile_prev != None:
  51. loader_prev.open_database(db_plugin.dbfile_prev)
  52. loader = core.db.loader.Loader()
  53. loader.open_database(db_plugin.dbfile)
  54. warn_plugin.verify_namespaces(loader.iterate_namespace_names())
  55. for each in loader.iterate_namespace_names():
  56. warn_plugin.verify_fields(each, loader.get_namespace(each).iterate_field_names())
  57. # Check for versions consistency
  58. for each in loader.iterate_properties():
  59. if db_plugin.dbfile_prev != None:
  60. prev = loader_prev.get_property(each.name)
  61. if prev != each.value:
  62. logging.warn("Previous data has got different metadata:")
  63. logging.warn(" - identification of change trends can be not reliable")
  64. logging.warn(" - use 'info' tool to get more details")
  65. break
  66. paths = None
  67. if len(args) == 0:
  68. paths = [""]
  69. else:
  70. paths = args
  71. # Try to optimise iterative change scans
  72. modified_file_ids = None
  73. if warn_plugin.mode != warn_plugin.MODE_ALL:
  74. modified_file_ids = get_list_of_modified_files(loader, loader_prev)
  75. for path in paths:
  76. logging.info("Processing: " + re.sub(r'''[\\]''', "/", path))
  77. for limit in warn_plugin.iterate_limits():
  78. logging.info("Applying limit: " + str(limit))
  79. filters = [limit.filter]
  80. if modified_file_ids != None:
  81. filters.append(('file_id', 'IN', modified_file_ids))
  82. sort_by = None
  83. limit_by = None
  84. if hotspots != None:
  85. sort_by = limit.field
  86. if limit.type == "max":
  87. sort_by = "-" + sort_by
  88. limit_by = hotspots
  89. selected_data = loader.load_selected_data(limit.namespace,
  90. fields = [limit.field],
  91. path=path,
  92. filters = filters,
  93. sort_by=sort_by,
  94. limit_by=limit_by)
  95. if selected_data == None:
  96. logging.error("Specified path '" + path + "' is invalid (not found in the database records)")
  97. exit_code += 1
  98. continue
  99. for select_data in selected_data:
  100. is_modified = None
  101. diff = None
  102. file_data = loader.load_file_data(select_data.get_path())
  103. file_data_prev = loader_prev.load_file_data(select_data.get_path())
  104. if file_data_prev != None:
  105. if file_data.get_checksum() == file_data_prev.get_checksum():
  106. diff = 0
  107. is_modified = False
  108. else:
  109. matcher = core.db.utils.FileRegionsMatcher(file_data, file_data_prev)
  110. prev_id = matcher.get_prev_id(select_data.get_region().get_id())
  111. if matcher.is_matched(select_data.get_region().get_id()):
  112. if matcher.is_modified(select_data.get_region().get_id()):
  113. is_modified = True
  114. else:
  115. is_modified = False
  116. diff = core.db.loader.DiffData(select_data,
  117. file_data_prev.get_region(prev_id)).get_data(limit.namespace, limit.field)
  118. if warn_plugin.is_mode_matched(limit.limit, select_data.get_data(limit.namespace, limit.field), diff, is_modified):
  119. exit_code += 1
  120. region_cursor = 0
  121. region_name = ""
  122. if select_data.get_region() != None:
  123. region_cursor = select_data.get_region().cursor
  124. region_name = select_data.get_region().name
  125. report_limit_exceeded(select_data.get_path(),
  126. region_cursor,
  127. limit.namespace,
  128. limit.field,
  129. region_name,
  130. select_data.get_data(limit.namespace, limit.field),
  131. diff,
  132. limit.limit,
  133. is_modified)
  134. return exit_code
  135. def get_list_of_modified_files(loader, loader_prev):
  136. logging.info("Identifying changed files...")
  137. old_files_map = {}
  138. for each in loader_prev.iterate_file_data():
  139. old_files_map[each.get_path()] = each.get_checksum()
  140. if len(old_files_map) == 0:
  141. return None
  142. modified_file_ids = []
  143. for each in loader.iterate_file_data():
  144. if len(modified_file_ids) > 1000: # If more than 1000 files changed, skip optimisation
  145. return None
  146. if (each.get_path() not in old_files_map.keys()) or old_files_map[each.get_path()] != each.get_checksum():
  147. modified_file_ids.append(str(each.get_id()))
  148. old_files_map = None
  149. if len(modified_file_ids) != 0:
  150. modified_file_ids = " , ".join(modified_file_ids)
  151. modified_file_ids = "(" + modified_file_ids + ")"
  152. return modified_file_ids
  153. return None
  154. def report_limit_exceeded(path, cursor, namespace, field, region_name, stat_level, trend_value, stat_limit, is_modified):
  155. message = "Metric '" + namespace + "/" + field + "' for region '" + region_name + "' exceeds the limit."
  156. details = [("Metric name", namespace + "/" + field),
  157. ("Region name", region_name),
  158. ("Metric value", stat_level),
  159. ("Modified", is_modified),
  160. ("Change trend", '{0:{1}}'.format(trend_value, '+' if trend_value else '')),
  161. ("Limit", stat_limit)]
  162. core.export.cout.cout(path, cursor, core.export.cout.SEVERITY_WARNING, message, details)