export.py 3.3 KB

#
# Metrix++, Copyright 2009-2019, Metrix++ Project
# Link: https://github.com/metrixplusplus/metrixplusplus
#
# This file is a part of Metrix++ Tool.
#

import mpp.api
import mpp.utils

import csv


class Plugin(mpp.api.Plugin, mpp.api.IRunable):
    # Exports collected metrics as CSV rows on stdout.

    def run(self, args):
        # Loaders for the current database and the previous (baseline) one.
        self.loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
        self.loader = self.get_plugin('mpp.dbf').get_loader()

        paths = None
        if len(args) == 0:
            # No explicit paths: export everything stored in the database.
            paths = [""]
        else:
            paths = args

        return self._export_to_stdout(paths)

    def _export_to_stdout(self, paths):
        class StdoutWriter(object):
            # Minimal file-like object for csv.writer: print each formatted row.
            def write(self, *args, **kwargs):
                print(args[0].strip())

        exit_code = 0

        # Fixed header columns followed by one "namespace:field" column
        # per metric available in the database.
        columns = []
        columnNames = ["file", "region", "type", "modified", "line start", "line end"]
        for name in sorted(self.loader.iterate_namespace_names()):
            namespace = self.loader.get_namespace(name)
            for field in sorted(namespace.iterate_field_names()):
                columns.append((name, field))
                columnNames.append(name + ":" + field)

        writer = StdoutWriter()
        csvWriter = csv.writer(writer)
        csvWriter.writerow(columnNames)

        for path in paths:
            path = mpp.utils.preprocess_path(path)

            files = self.loader.iterate_file_data(path)
            if files == None:
                mpp.utils.report_bad_path(path)
                exit_code += 1
                continue

            for file_data in files:
                # Match regions against the previous snapshot, if one exists,
                # so the "modified" flag can be reported per region.
                matcher = None
                file_data_prev = self.loader_prev.load_file_data(file_data.get_path())
                if file_data_prev != None:
                    matcher = mpp.utils.FileRegionsMatcher(file_data, file_data_prev)

                # One row per region.
                for reg in file_data.iterate_regions():
                    per_reg_data = []
                    per_reg_data.append(mpp.api.Region.T().to_str(reg.get_type()))
                    if matcher != None and matcher.is_matched(reg.get_id()):
                        per_reg_data.append(matcher.is_modified(reg.get_id()))
                    else:
                        per_reg_data.append(None)
                    per_reg_data.append(reg.get_line_begin())
                    per_reg_data.append(reg.get_line_end())
                    for column in columns:
                        per_reg_data.append(reg.get_data(column[0], column[1]))
                    csvWriter.writerow([file_data.get_path(), reg.get_name()] + per_reg_data)

                # One summary row per file: "modified" is a checksum comparison
                # against the previous snapshot.
                per_file_data = []
                per_file_data.append('file')
                if file_data_prev != None:
                    per_file_data.append(file_data.get_checksum() != file_data_prev.get_checksum())
                else:
                    per_file_data.append(None)
                per_file_data.append(file_data.get_region(1).get_line_begin())
                per_file_data.append(file_data.get_region(1).get_line_end())
                for column in columns:
                    per_file_data.append(file_data.get_data(column[0], column[1]))
                csvWriter.writerow([file_data.get_path(), None] + per_file_data)

        return exit_code
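
Note: csv.writer only requires an object exposing a write() method, which is why the nested StdoutWriter class above is enough to stream rows to the console. The snippet below is a minimal standalone sketch of that same pattern; it does not depend on Metrix++, and the row values are purely hypothetical, just to show the shape of the exported CSV.

import csv

class StdoutWriter(object):
    # csv.writer calls write() with each fully formatted row string;
    # strip() drops the trailing line terminator before printing.
    def write(self, *args, **kwargs):
        print(args[0].strip())

csvWriter = csv.writer(StdoutWriter())
csvWriter.writerow(["file", "region", "type", "modified", "line start", "line end"])
# Hypothetical rows for illustration only.
csvWriter.writerow(["./src/example.cpp", "main", "function", False, 10, 42])
csvWriter.writerow(["./src/example.cpp", None, "file", None, 1, 120])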