# collect.py
  1. #
  2. # Metrix++, Copyright 2009-2013, Metrix++ Project
  3. # Link: http://metrixplusplus.sourceforge.net
  4. #
  5. # This file is a part of Metrix++ Tool.
  6. #
  7. # Metrix++ is free software: you can redistribute it and/or modify
  8. # it under the terms of the GNU General Public License as published by
  9. # the Free Software Foundation, version 3 of the License.
  10. #
  11. # Metrix++ is distributed in the hope that it will be useful,
  12. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  13. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  14. # GNU General Public License for more details.
  15. #
  16. # You should have received a copy of the GNU General Public License
  17. # along with Metrix++. If not, see <http://www.gnu.org/licenses/>.
  18. #
  19. import mpp.api
  20. import re
  21. import os
  22. import logging
  23. import time
  24. import binascii
  25. import fnmatch
  26. import multiprocessing.pool
  27. class Plugin(mpp.api.Plugin, mpp.api.Parent, mpp.api.IConfigurable, mpp.api.IRunable):
  28. def __init__(self):
  29. self.reader = DirectoryReader()
  30. self.include_rules = []
  31. self.exclude_rules = []
  32. self.exclude_files = []
  33. self.parsers = []
  34. super(Plugin, self).__init__()
  35. def declare_configuration(self, parser):
  36. parser.add_option("--std.general.proctime", "--sgpt", action="store_true", default=False,
  37. help="If the option is set (True), the tool measures processing time per file [default: %default]")
  38. parser.add_option("--std.general.procerrors", "--sgpe", action="store_true", default=False,
  39. help="If the option is set (True), the tool counts number of processing/parsing errors per file [default: %default]")
  40. parser.add_option("--std.general.size", "--sgs", action="store_true", default=False,
  41. help="If the option is set (True), the tool collects file size metric (in bytes) [default: %default]")
  42. parser.add_option("--include-files", "--if", default=r'.*',
  43. help="Defines the regular expression pattern to include files in processing [default: %default]")
  44. parser.add_option("--exclude-files", "--ef", default=r'^[.]',
  45. help="Defines the regular expression pattern to exclude files from processing [default: %default]")
  46. parser.add_option("--non-recursively", "--nr", action="store_true", default=False,
  47. help="If the option is set (True), sub-directories are not processed [default: %default]")
  48. self.optparser = parser
  49. def configure(self, options):
  50. self.is_proctime_enabled = options.__dict__['std.general.proctime']
  51. self.is_procerrors_enabled = options.__dict__['std.general.procerrors']
  52. self.is_size_enabled = options.__dict__['std.general.size']
  53. try:
  54. self.add_include_rule(re.compile(options.__dict__['include_files']))
  55. except Exception as e:
  56. self.optparser.error("option --include-files: " + str(e))
  57. try:
  58. self.add_exclude_rule(re.compile(options.__dict__['exclude_files']))
  59. except Exception as e:
  60. self.optparser.error("option --exclude-files: " + str(e))
  61. self.non_recursively = options.__dict__['non_recursively']
  62. def initialize(self):
  63. fields = []
  64. if self.is_proctime_enabled == True:
  65. fields.append(self.Field('proctime', float))
  66. if self.is_procerrors_enabled == True:
  67. fields.append(self.Field('procerrors', int))
  68. if self.is_size_enabled == True:
  69. fields.append(self.Field('size', int))
  70. super(Plugin, self).initialize(namespace='std.general', support_regions=False, fields=fields)
  71. self.add_exclude_file(self.get_plugin('mpp.dbf').get_dbfile_path())
  72. self.add_exclude_file(self.get_plugin('mpp.dbf').get_dbfile_prev_path())
  73. def run(self, args):
  74. if len(args) == 0:
  75. return self.reader.run(self, "./")
  76. retcode = 0
  77. for directory in args:
  78. retcode += self.reader.run(self, directory)
  79. return retcode
  80. def register_parser(self, fnmatch_exp_list, parser):
  81. self.parsers.append((fnmatch_exp_list, parser))
  82. def get_parser(self, file_path):
  83. for parser in self.parsers:
  84. for fnmatch_exp in parser[0]:
  85. if fnmatch.fnmatch(file_path, fnmatch_exp):
  86. return parser[1]
  87. return None
  88. def add_include_rule(self, re_compiled_pattern):
  89. self.include_rules.append(re_compiled_pattern)
  90. def add_exclude_rule(self, re_compiled_pattern):
  91. self.exclude_rules.append(re_compiled_pattern)
  92. def add_exclude_file(self, file_path):
  93. if file_path == None:
  94. return
  95. self.exclude_files.append(file_path)
  96. def is_file_excluded(self, file_name):
  97. for each in self.include_rules:
  98. if re.match(each, os.path.basename(file_name)) == None:
  99. return True
  100. for each in self.exclude_rules:
  101. if re.match(each, os.path.basename(file_name)) != None:
  102. return True
  103. for each in self.exclude_files:
  104. if os.path.basename(each) == os.path.basename(file_name):
  105. if os.stat(each) == os.stat(file_name):
  106. return True
  107. return False
  108. class DirectoryReader():
  109. def run(self, plugin, directory):
  110. IS_TEST_MODE = False
  111. if 'METRIXPLUSPLUS_TEST_MODE' in os.environ.keys():
  112. IS_TEST_MODE = True
  113. def run_per_file(plugin, fname, full_path):
  114. exit_code = 0
  115. norm_path = re.sub(r'''[\\]''', "/", full_path)
  116. if os.path.isabs(norm_path) == False and norm_path.startswith('./') == False:
  117. norm_path = './' + norm_path
  118. if plugin.is_file_excluded(norm_path) == False:
  119. if os.path.isdir(full_path):
  120. if plugin.non_recursively == False:
  121. exit_code += run_recursively(plugin, full_path)
  122. else:
  123. parser = plugin.get_parser(full_path)
  124. if parser == None:
  125. logging.info("Skipping: " + norm_path)
  126. else:
  127. logging.info("Processing: " + norm_path)
  128. ts = time.time()
  129. f = open(full_path, 'rU');
  130. text = f.read();
  131. f.close()
  132. checksum = binascii.crc32(text) & 0xffffffff # to match python 3
  133. db_loader = plugin.get_plugin('mpp.dbf').get_loader()
  134. (data, is_updated) = db_loader.create_file_data(norm_path, checksum, text)
  135. procerrors = parser.process(plugin, data, is_updated)
  136. if plugin.is_proctime_enabled == True:
  137. data.set_data('std.general', 'proctime',
  138. (time.time() - ts) if IS_TEST_MODE == False else 0.01)
  139. if plugin.is_procerrors_enabled == True and procerrors != None and procerrors != 0:
  140. data.set_data('std.general', 'procerrors', procerrors)
  141. if plugin.is_size_enabled == True:
  142. data.set_data('std.general', 'size', len(text))
  143. db_loader.save_file_data(data)
  144. #logging.debug("-" * 60)
  145. exit_code += procerrors
  146. else:
  147. logging.info("Excluding: " + norm_path)
  148. return exit_code
  149. #thread_pool = multiprocessing.pool.ThreadPool()
  150. #def mp_worker(args):
  151. # run_per_file(args[0], args[1], args[2])
  152. def run_recursively(plugin, directory):
  153. exit_code = 0
  154. #thread_pool.map(mp_worker,
  155. # [(plugin, f, os.path.join(subdir, f))
  156. # for subdir, dirs, files in os.walk(directory) for f in files])
  157. for fname in sorted(os.listdir(directory)):
  158. full_path = os.path.join(directory, fname)
  159. exit_code += run_per_file(plugin, fname, full_path)
  160. return exit_code
  161. if os.path.exists(directory) == False:
  162. logging.error("Skipping (does not exist): " + directory)
  163. return 1
  164. if os.path.isdir(directory):
  165. total_errors = run_recursively(plugin, directory)
  166. else:
  167. total_errors = run_per_file(plugin, os.path.basename(directory), directory)
  168. total_errors = total_errors # used, warnings are per file if not zero
  169. return 0 # ignore errors, collection is successful anyway