# dir.py
#
# Metrix++, Copyright 2009-2013, Metrix++ Project
# Link: http://metrixplusplus.sourceforge.net
#
# This file is a part of Metrix++ Tool.
#
# Metrix++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# Metrix++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Metrix++. If not, see <http://www.gnu.org/licenses/>.
#
import core.api
import re
import os
import logging
import time
import binascii
  25. class Plugin(core.api.Plugin, core.api.Parent, core.api.IConfigurable, core.api.IRunable):
  26. def __init__(self):
  27. self.reader = DirectoryReader()
  28. self.exclude_rules = []
  29. def declare_configuration(self, parser):
  30. parser.add_option("--non-recursively", "--nr", action="store_true", default=False,
  31. help="If the option is set (True), sub-directories are not processed [default: %default]")
  32. parser.add_option("--exclude-files", "--ef", default=r'^[.]',
  33. help="Defines the pattern to exclude files from processing [default: %default]")
  34. parser.add_option("--std.general.proctime", "--sgpt", action="store_true", default=False,
  35. help="If the option is set (True), the tool measures processing time per file [default: %default]")
  36. parser.add_option("--std.general.procerrors", "--sgpe", action="store_true", default=False,
  37. help="If the option is set (True), the tool counts number of processing/parsing errors per file [default: %default]")
  38. parser.add_option("--std.general.size", "--sgs", action="store_true", default=False,
  39. help="If the option is set (True), the tool collects file size metric (in bytes) [default: %default]")
  40. def configure(self, options):
  41. self.non_recursively = options.__dict__['non_recursively']
  42. self.add_exclude_rule(re.compile(options.__dict__['exclude_files']))
  43. self.is_proctime_enabled = options.__dict__['std.general.proctime']
  44. self.is_procerrors_enabled = options.__dict__['std.general.procerrors']
  45. self.is_size_enabled = options.__dict__['std.general.size']
  46. def initialize(self):
  47. fields = []
  48. if self.is_proctime_enabled == True:
  49. fields.append(self.Field('proctime', float))
  50. if self.is_procerrors_enabled == True:
  51. fields.append(self.Field('procerrors', int))
  52. if self.is_size_enabled == True:
  53. fields.append(self.Field('size', int))
  54. core.api.Plugin.initialize(self, namespace='std.general', support_regions=False, fields=fields)
  55. def run(self, args):
  56. if len(args) == 0:
  57. return self.reader.run(self, "./")
  58. for directory in args:
  59. return self.reader.run(self, directory)
  60. def add_exclude_rule(self, re_compiled_pattern):
  61. # TODO file name may have special regexp symbols what causes an exception
  62. # For example try to run a collection with "--db-file=metrix++" option
  63. self.exclude_rules.append(re_compiled_pattern)
  64. def is_file_excluded(self, file_name):
  65. for each in self.exclude_rules:
  66. if re.match(each, file_name) != None:
  67. return True
  68. return False
  69. class DirectoryReader():
  70. def run(self, plugin, directory):
  71. def run_per_file(plugin, fname, full_path):
  72. exit_code = 0
  73. norm_path = re.sub(r'''[\\]''', "/", full_path)
  74. if plugin.is_file_excluded(fname) == False:
  75. if os.path.isdir(full_path):
  76. if plugin.non_recursively == False:
  77. exit_code += run_recursively(plugin, full_path)
  78. else:
  79. parser = plugin.get_plugin_loader().get_parser(full_path)
  80. if parser == None:
  81. logging.info("Skipping: " + norm_path)
  82. else:
  83. logging.info("Processing: " + norm_path)
  84. ts = time.time()
  85. f = open(full_path, 'r');
  86. text = f.read();
  87. f.close()
  88. checksum = binascii.crc32(text) & 0xffffffff # to match python 3
  89. (data, is_updated) = plugin.get_plugin_loader().get_database_loader().create_file_data(norm_path, checksum, text)
  90. procerrors = parser.process(plugin, data, is_updated)
  91. if plugin.is_proctime_enabled == True:
  92. data.set_data('std.general', 'proctime', time.time() - ts)
  93. if plugin.is_procerrors_enabled == True and procerrors != None and procerrors != 0:
  94. data.set_data('std.general', 'procerrors', procerrors)
  95. if plugin.is_size_enabled == True:
  96. data.set_data('std.general', 'size', len(text))
  97. plugin.get_plugin_loader().get_database_loader().save_file_data(data)
  98. logging.debug("-" * 60)
  99. exit_code += procerrors
  100. else:
  101. logging.info("Excluding: " + norm_path)
  102. return exit_code
  103. def run_recursively(plugin, directory):
  104. exit_code = 0
  105. for fname in os.listdir(directory):
  106. full_path = os.path.join(directory, fname)
  107. exit_code += run_per_file(plugin, fname, full_path)
  108. return exit_code
  109. if os.path.exists(directory) == False:
  110. logging.error("Skipping (does not exist): " + directory)
  111. return 1
  112. if os.path.isdir(directory):
  113. total_errors = run_recursively(plugin, directory)
  114. else:
  115. total_errors = run_per_file(plugin, os.path.basename(directory), directory)
  116. total_errors = total_errors # used, warnings are per file if not zero
  117. return 0 # ignore errors, collection is successful anyway