# limit.py
#
# Metrix++, Copyright 2009-2013, Metrix++ Project
# Link: http://metrixplusplus.sourceforge.net
#
# This file is a part of Metrix++ Tool.
#
# Metrix++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# Metrix++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Metrix++. If not, see <http://www.gnu.org/licenses/>.
#
import logging
import re
import time

import core.cmdparser
import core.db.loader
import core.db.post
import core.db.utils
import core.export.cout
import core.log
import core.warn
  29. def main():
  30. exit_code = 0
  31. log_plugin = core.log.Plugin()
  32. db_plugin = core.db.post.Plugin()
  33. warn_plugin = core.warn.Plugin()
  34. parser = core.cmdparser.MultiOptionParser(usage="Usage: %prog [options] -- [path 1] ... [path N]")
  35. log_plugin.declare_configuration(parser)
  36. db_plugin.declare_configuration(parser)
  37. warn_plugin.declare_configuration(parser)
  38. (options, args) = parser.parse_args()
  39. log_plugin.configure(options)
  40. db_plugin.configure(options)
  41. warn_plugin.configure(options)
  42. loader_prev = core.db.loader.Loader()
  43. if db_plugin.dbfile_prev != None:
  44. loader_prev.open_database(db_plugin.dbfile_prev)
  45. loader = core.db.loader.Loader()
  46. loader.open_database(db_plugin.dbfile)
  47. warn_plugin.verify_namespaces(loader.iterate_namespace_names())
  48. for each in loader.iterate_namespace_names():
  49. warn_plugin.verify_fields(each, loader.get_namespace(each).iterate_field_names())
  50. # Check for versions consistency
  51. for each in loader.iterate_properties():
  52. if db_plugin.dbfile_prev != None:
  53. prev = loader_prev.get_property(each.name)
  54. if prev != each.value:
  55. logging.warn("Previous data has got different metadata:")
  56. logging.warn(" - identification of change trends can be not reliable")
  57. logging.warn(" - use 'info' tool to get more details")
  58. break
  59. paths = None
  60. if len(args) == 0:
  61. paths = [""]
  62. else:
  63. paths = args
  64. # Try to optimise iterative change scans
  65. modified_file_ids = None
  66. if warn_plugin.mode != warn_plugin.MODE_ALL:
  67. modified_file_ids = get_list_of_modified_files(loader, loader_prev)
  68. for path in paths:
  69. logging.info("Processing: " + re.sub(r'''[\\]''', "/", path))
  70. for limit in warn_plugin.iterate_limits():
  71. logging.info("Applying limit: " + str(limit))
  72. filters = [limit.filter]
  73. if modified_file_ids != None:
  74. filters.append(('file_id', 'IN', modified_file_ids))
  75. selected_data = loader.load_selected_data(limit.namespace,
  76. fields = [limit.field],
  77. path=path,
  78. filters = filters)
  79. if selected_data == None:
  80. logging.error("Specified path '" + path + "' is invalid (not found in the database records)")
  81. exit_code += 1
  82. continue
  83. for select_data in selected_data:
  84. is_modified = None
  85. diff = None
  86. file_data = loader.load_file_data(select_data.get_path())
  87. file_data_prev = loader_prev.load_file_data(select_data.get_path())
  88. if file_data_prev != None:
  89. if file_data.get_checksum() == file_data_prev.get_checksum():
  90. diff = 0
  91. is_modified = False
  92. else:
  93. matcher = core.db.utils.FileRegionsMatcher(file_data, file_data_prev)
  94. prev_id = matcher.get_prev_id(select_data.get_region().get_id())
  95. if matcher.is_matched(select_data.get_region().get_id()):
  96. if matcher.is_modified(select_data.get_region().get_id()):
  97. is_modified = True
  98. else:
  99. is_modified = False
  100. diff = core.db.loader.DiffData(select_data,
  101. file_data_prev.get_region(prev_id)).get_data(limit.namespace, limit.field)
  102. if warn_plugin.is_mode_matched(limit.limit, select_data.get_data(limit.namespace, limit.field), diff, is_modified):
  103. exit_code += 1
  104. region_cursor = 0
  105. region_name = ""
  106. if select_data.get_region() != None:
  107. region_cursor = select_data.get_region().cursor
  108. region_name = select_data.get_region().name
  109. report_limit_exceeded(select_data.get_path(),
  110. region_cursor,
  111. limit.namespace,
  112. limit.field,
  113. region_name,
  114. select_data.get_data(limit.namespace, limit.field),
  115. diff,
  116. limit.limit,
  117. is_modified)
  118. return exit_code
  119. def get_list_of_modified_files(loader, loader_prev):
  120. logging.info("Identifying changed files...")
  121. old_files_map = {}
  122. for each in loader_prev.iterate_file_data():
  123. old_files_map[each.get_path()] = each.get_checksum()
  124. if len(old_files_map) == 0:
  125. return None
  126. modified_file_ids = []
  127. for each in loader.iterate_file_data():
  128. if len(modified_file_ids) > 1000: # If more than 1000 files changed, skip optimisation
  129. return None
  130. if (each.get_path() not in old_files_map.keys()) or old_files_map[each.get_path()] != each.get_checksum():
  131. modified_file_ids.append(str(each.get_id()))
  132. old_files_map = None
  133. if len(modified_file_ids) != 0:
  134. modified_file_ids = " , ".join(modified_file_ids)
  135. modified_file_ids = "(" + modified_file_ids + ")"
  136. return modified_file_ids
  137. return None
  138. def report_limit_exceeded(path, cursor, namespace, field, region_name, stat_level, trend_value, stat_limit, is_modified):
  139. message = "Metric '" + namespace + "/" + field + "' for region '" + region_name + "' exceeds the limit."
  140. details = [("Metric name", namespace + "/" + field),
  141. ("Region name", region_name),
  142. ("Metric value", stat_level),
  143. ("Modified", is_modified),
  144. ("Change trend", '{0:{1}}'.format(trend_value, '+' if trend_value else '')),
  145. ("Limit", stat_limit)]
  146. core.export.cout.cout(path, cursor, core.export.cout.SEVERITY_WARNING, message, details)
  147. if __name__ == '__main__':
  148. ts = time.time()
  149. core.log.set_default_format()
  150. exit_code = main()
  151. logging.warning("Exit code: " + str(exit_code) + ". Time spent: " + str(round((time.time() - ts), 2)) + " seconds. Done")
  152. exit(exit_code)