limit.py

#
# Metrix++, Copyright 2009-2013, Metrix++ Project
# Link: http://metrixplusplus.sourceforge.net
#
# This file is a part of Metrix++ Tool.
#
# Metrix++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# Metrix++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Metrix++. If not, see <http://www.gnu.org/licenses/>.
#

import logging
import time

import core.log
import core.db.loader
import core.db.post
import core.db.utils
import core.export.cout
import core.warn
import core.cmdparser
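

# Post-processing 'limit' tool: loads the collected metrics database, applies
# every configured limit to the selected paths, and counts each violation (and
# each invalid path) in exit_code, which becomes the process exit status.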
def main():
    exit_code = 0
    log_plugin = core.log.Plugin()
    db_plugin = core.db.post.Plugin()
    warn_plugin = core.warn.Plugin()

    parser = core.cmdparser.MultiOptionParser(usage="Usage: %prog [options] -- <path 1> ... <path N>")
    log_plugin.declare_configuration(parser)
    db_plugin.declare_configuration(parser)
    warn_plugin.declare_configuration(parser)

    (options, args) = parser.parse_args()
    log_plugin.configure(options)
    db_plugin.configure(options)
    warn_plugin.configure(options)
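
    # Open the previous database first (only if one was configured) so that
    # change trends can be computed, then the current database to be checked.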
    loader_prev = core.db.loader.Loader()
    if db_plugin.dbfile_prev != None:
        loader_prev.open_database(db_plugin.dbfile_prev)

    loader = core.db.loader.Loader()
    loader.open_database(db_plugin.dbfile)

    warn_plugin.verify_namespaces(loader.iterate_namespace_names())
    for each in loader.iterate_namespace_names():
        warn_plugin.verify_fields(each, loader.get_namespace(each).iterate_field_names())

    # Check for versions consistency
    for each in loader.iterate_properties():
        if db_plugin.dbfile_prev != None:
            prev = loader_prev.get_property(each.name)
            if prev != each.value:
                logging.warn("Previous data has got different metadata:")
                logging.warn(" - identification of change trends can be not reliable")
                logging.warn(" - use 'info' tool to get more details")
                break
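
    # With no positional arguments, check everything: an empty path selects
    # every record in the database.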
    paths = None
    if len(args) == 0:
        paths = [""]
    else:
        paths = args

    # Try to optimise iterative change scans
    modified_file_ids = None
    if warn_plugin.mode != warn_plugin.MODE_ALL:
        modified_file_ids = get_list_of_modified_files(loader, loader_prev)
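
    # Apply every configured limit to every requested path.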
    for path in paths:
        logging.info("Processing: " + path)

        for limit in warn_plugin.iterate_limits():
            logging.info("Applying limit: " + str(limit))
            filters = [limit.filter]
            if modified_file_ids != None:
                filters.append(('file_id', 'IN', modified_file_ids))
            selected_data = loader.load_selected_data(limit.namespace,
                                                      fields = [limit.field],
                                                      path=path,
                                                      filters = filters)
            if selected_data == None:
                logging.error("Specified path '" + path + "' is invalid (not found in the database records)")
                exit_code += 1
                continue
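
            # For every selected region, work out whether it has changed since
            # the previous snapshot and compute the metric's change trend (diff).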
            for select_data in selected_data:
                is_modified = None
                diff = None
                file_data = loader.load_file_data(select_data.get_path())
                file_data_prev = loader_prev.load_file_data(select_data.get_path())
                if file_data_prev != None:
                    if file_data.get_checksum() == file_data_prev.get_checksum():
                        diff = 0
                        is_modified = False
                    else:
                        matcher = core.db.utils.FileRegionsMatcher(file_data, file_data_prev)
                        prev_id = matcher.get_prev_id(select_data.get_region().get_id())
                        if matcher.is_matched(select_data.get_region().get_id()):
                            if matcher.is_modified(select_data.get_region().get_id()):
                                is_modified = True
                            else:
                                is_modified = False
                            diff = core.db.loader.DiffData(select_data,
                                                           file_data_prev.get_region(prev_id)).get_data(limit.namespace, limit.field)
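
                # Report the region if it violates the limit under the active
                # warn mode, taking the change trend and modification flag into account.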
                if warn_plugin.is_mode_matched(limit.limit, select_data.get_data(limit.namespace, limit.field), diff, is_modified):
                    exit_code += 1
                    region_cursor = 0
                    region_name = ""
                    if select_data.get_region() != None:
                        region_cursor = select_data.get_region().cursor
                        region_name = select_data.get_region().name
                    report_limit_exceeded(select_data.get_path(),
                                          region_cursor,
                                          limit.namespace,
                                          limit.field,
                                          region_name,
                                          select_data.get_data(limit.namespace, limit.field),
                                          diff,
                                          limit.limit,
                                          is_modified)

    return exit_code
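

# Collects the ids of files whose checksum differs between the current and the
# previous database. Returns them as a string ready for an SQL 'IN' clause, or
# None when the optimisation does not apply (no previous data, no changes, or
# more than 1000 changed files).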
def get_list_of_modified_files(loader, loader_prev):
    logging.info("Identifying changed files...")

    old_files_map = {}
    for each in loader_prev.iterate_file_data():
        old_files_map[each.get_path()] = each.get_checksum()
    if len(old_files_map) == 0:
        return None

    modified_file_ids = []
    for each in loader.iterate_file_data():
        if len(modified_file_ids) > 1000: # If more than 1000 files changed, skip optimisation
            return None
        if (each.get_path() not in old_files_map.keys()) or old_files_map[each.get_path()] != each.get_checksum():
            modified_file_ids.append(str(each.get_id()))
    old_files_map = None

    if len(modified_file_ids) != 0:
        modified_file_ids = " , ".join(modified_file_ids)
        modified_file_ids = "(" + modified_file_ids + ")"
        return modified_file_ids

    return None
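

# Formats a single violation and prints it through the standard console
# exporter as a warning, together with the metric value, change trend and limit.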
def report_limit_exceeded(path, cursor, namespace, field, region_name, stat_level, trend_value, stat_limit, is_modified):
    message = "Metric '" + namespace + "/" + field + "' for region '" + region_name + "' exceeds the limit."
    details = [("Metric name", namespace + "/" + field),
               ("Region name", region_name),
               ("Metric value", stat_level),
               ("Modified", is_modified),
               ("Change trend", '{0:{1}}'.format(trend_value, '+' if trend_value else '')),
               ("Limit", stat_limit)]
    core.export.cout.cout(path, cursor, core.export.cout.SEVERITY_WARNING, message, details)
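

# Standalone execution: set up default logging and turn the number of detected
# violations into the process exit status.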
if __name__ == '__main__':
    ts = time.time()
    core.log.set_default_format()
    exit_code = main()
    logging.warning("Exit code: " + str(exit_code) + ". Time spent: " + str(round((time.time() - ts), 2)) + " seconds. Done")
    exit(exit_code)