# loader.py
  1. '''
  2. Created on 25/07/2012
  3. @author: konstaa
  4. '''
  5. import logging
  6. import os.path
  7. import core.api
  8. import core.db.sqlite
  9. ####################################
  10. # Data Interface
  11. ####################################
  12. class Data(object):
  13. def __init__(self):
  14. self.data = {}
  15. def get_data(self, namespace, field):
  16. if namespace not in self.data.keys():
  17. return None
  18. if field not in self.data[namespace].keys():
  19. return None
  20. return self.data[namespace][field]
  21. def set_data(self, namespace, field, value):
  22. if namespace not in self.data:
  23. self.data[namespace] = {}
  24. self.data[namespace][field] = value
  25. def iterate_namespaces(self):
  26. for namespace in self.data.keys():
  27. yield namespace
  28. def iterate_fields(self, namespace):
  29. for field in self.data[namespace].keys():
  30. yield (field, self.data[namespace][field])
  31. def get_data_tree(self, namespaces=None):
  32. return self.data
  33. def __repr__(self):
  34. return object.__repr__(self) + " with data " + self.data.__repr__()
  35. class LoadableData(Data):
  36. def __init__(self, loader, file_id, region_id):
  37. Data.__init__(self)
  38. self.loader = loader
  39. self.file_id = file_id
  40. self.region_id = region_id
  41. self.loaded_namespaces = []
  42. self.changed_namespaces = []
  43. def load_namespace(self, namespace):
  44. try:
  45. row = self.loader.db.get_row(namespace, self.file_id, self.region_id)
  46. except Exception:
  47. logging.debug("No data in the database for namespace: " + namespace)
  48. return
  49. if row == None:
  50. return
  51. for column_name in row.keys():
  52. packager = self.loader.get_namespace(namespace).get_field_packager(column_name)
  53. if packager == None:
  54. continue
  55. Data.set_data(self, namespace, column_name, packager.unpack(row[column_name]))
  56. def set_data(self, namespace, field, value):
  57. if namespace not in self.changed_namespaces:
  58. self.changed_namespaces.append(namespace)
  59. return Data.set_data(self, namespace, field, value)
  60. def get_data(self, namespace, field):
  61. if namespace not in self.loaded_namespaces:
  62. self.loaded_namespaces.append(namespace)
  63. self.load_namespace(namespace)
  64. return Data.get_data(self, namespace, field)
  65. def is_namespace_updated(self, namespace):
  66. return namespace in self.changed_namespaces
  67. def is_namespace_loaded(self, namespace):
  68. return namespace in self.loaded_namespaces
  69. def get_data_tree(self, namespaces=None):
  70. if namespaces == None:
  71. namespaces = self.loader.iterate_namespace_names()
  72. for each in namespaces:
  73. self.load_namespace(each)
  74. return Data.get_data_tree(self)
  75. class FileRegionData(LoadableData):
  76. class T(object):
  77. NONE = 0x00
  78. GLOBAL = 0x01
  79. CLASS = 0x02
  80. STRUCT = 0x04
  81. NAMESPACE = 0x08
  82. FUNCTION = 0x10
  83. ANY = 0xFFFFFFFF
  84. def to_str(self, group):
  85. if group == self.NONE:
  86. return "none"
  87. elif group == self.GLOBAL:
  88. return "global"
  89. elif group == self.CLASS:
  90. return "class"
  91. elif group == self.STRUCT:
  92. return "struct"
  93. elif group == self.NAMESPACE:
  94. return "namespace"
  95. elif group == self.FUNCTION:
  96. return "function"
  97. else:
  98. assert(False)
  99. def __init__(self, loader, file_id, region_id, region_name, offset_begin, offset_end, line_begin, line_end, cursor_line, group, checksum):
  100. LoadableData.__init__(self, loader, file_id, region_id)
  101. self.name = region_name
  102. self.begin = offset_begin
  103. self.end = offset_end
  104. self.line_begin = line_begin
  105. self.line_end = line_end
  106. self.cursor = cursor_line
  107. self.group = group
  108. self.checksum = checksum
  109. self.children = []
  110. def get_id(self):
  111. return self.region_id
  112. def get_name(self):
  113. return self.name
  114. def get_offset_begin(self):
  115. return self.begin
  116. def get_offset_end(self):
  117. return self.end
  118. def get_line_begin(self):
  119. return self.line_begin
  120. def get_line_end(self):
  121. return self.line_end
  122. def get_cursor(self):
  123. return self.cursor
  124. def get_type(self):
  125. return self.group
  126. def get_checksum(self):
  127. return self.checksum
  128. def register_subregion_id(self, child_id):
  129. self.children.append(child_id)
  130. def iterate_subregion_ids(self):
  131. return self.children
  132. class Marker(object):
  133. class T(object):
  134. NONE = 0x00
  135. COMMENT = 0x01
  136. STRING = 0x02
  137. PREPROCESSOR = 0x04
  138. ALL_EXCEPT_CODE = 0x07
  139. def __init__(self, offset_begin, offset_end, group):
  140. self.begin = offset_begin
  141. self.end = offset_end
  142. self.group = group
  143. def get_offset_begin(self):
  144. return self.begin
  145. def get_offset_end(self):
  146. return self.end
  147. def get_type(self):
  148. return self.group
  149. class FileData(LoadableData):
  150. def __init__(self, loader, path, file_id, checksum, content):
  151. LoadableData.__init__(self, loader, file_id, None)
  152. self.path = path
  153. self.checksum = checksum
  154. self.content = content
  155. self.regions = None
  156. self.markers = None
  157. self.loader = loader
  158. self.loading_tmp = []
  159. def get_id(self):
  160. return self.file_id
  161. def get_path(self):
  162. return self.path
  163. def get_checksum(self):
  164. return self.checksum
  165. def get_content(self, exclude = Marker.T.NONE):
  166. if exclude == Marker.T.NONE:
  167. return self.content
  168. if exclude == (Marker.T.COMMENT | Marker.T.STRING | Marker.T.PREPROCESSOR):
  169. # optimise frequent queries of this type
  170. if hasattr(self, 'content_cache'):
  171. return self.content_cache
  172. last_pos = 0
  173. content = ""
  174. for marker in self.iterate_markers(exclude):
  175. content += self.content[last_pos:marker.begin]
  176. content += " " * (marker.end - marker.begin)
  177. last_pos = marker.end
  178. content += self.content[last_pos:]
  179. if exclude == (Marker.T.COMMENT | Marker.T.STRING | Marker.T.PREPROCESSOR):
  180. self.content_cache = content
  181. assert(len(content) == len(self.content))
  182. return content
  183. def internal_append_region(self, region):
  184. # here we apply some magic - we rely on special ordering of coming regions,
  185. # which is supported by code parsers
  186. prev_id = None
  187. while True:
  188. if len(self.loading_tmp) == 0:
  189. break
  190. prev_id = self.loading_tmp.pop()
  191. if self.get_region(prev_id).get_offset_end() > region.get_offset_begin():
  192. self.loading_tmp.append(prev_id) # return back
  193. break
  194. self.loading_tmp.append(region.get_id())
  195. if prev_id != None:
  196. self.get_region(prev_id).register_subregion_id(region.get_id())
  197. self.regions.append(region)
  198. def load_regions(self):
  199. if self.regions == None:
  200. self.regions = []
  201. for each in self.loader.db.iterate_regions(self.get_id()):
  202. self.internal_append_region(FileRegionData(self.loader,
  203. self.get_id(),
  204. each.region_id,
  205. each.name,
  206. each.begin,
  207. each.end,
  208. each.line_begin,
  209. each.line_end,
  210. each.cursor,
  211. each.group,
  212. each.checksum))
  213. assert(len(self.regions) == each.region_id)
  214. def add_region(self, region_name, offset_begin, offset_end, line_begin, line_end, cursor_line, group, checksum):
  215. if self.regions == None:
  216. self.regions = [] # do not load in time of collection
  217. new_id = len(self.regions) + 1
  218. self.internal_append_region(FileRegionData(self.loader, self.get_id(), new_id, region_name, offset_begin, offset_end, line_begin, line_end, cursor_line, group, checksum))
  219. self.loader.db.create_region(self.file_id, new_id, region_name, offset_begin, offset_end, line_begin, line_end, cursor_line, group, checksum)
  220. return new_id
  221. def get_region(self, region_id):
  222. self.load_regions()
  223. return self.regions[region_id - 1]
  224. def iterate_regions(self, filter_group = FileRegionData.T.ANY):
  225. self.load_regions()
  226. for each in self.regions:
  227. if each.group & filter_group:
  228. yield each
  229. def are_regions_loaded(self):
  230. return self.regions != None
  231. def load_markers(self):
  232. if self.markers == None:
  233. self.markers = []
  234. for each in self.loader.db.iterate_markers(self.get_id()):
  235. self.markers.append(self.Marker(each.begin, each.end, each.group))
  236. def add_marker(self, offset_begin, offset_end, group):
  237. if self.markers == None:
  238. self.markers = [] # do not load in time of collection
  239. self.markers.append(Marker(offset_begin, offset_end, group))
  240. self.loader.db.create_marker(self.file_id, offset_begin, offset_end, group)
  241. def iterate_markers(self, filter_group = Marker.T.COMMENT |
  242. Marker.T.STRING | Marker.T.PREPROCESSOR):
  243. self.load_markers()
  244. for each in self.markers:
  245. if each.group & filter_group:
  246. yield each
  247. def get_marker_types(self):
  248. return Marker.T
  249. def get_region_types(self):
  250. return FileRegionData.T
  251. def are_markers_loaded(self):
  252. return self.markers != None
  253. def __repr__(self):
  254. return Data.__repr__(self) + " and regions " + self.regions.__repr__()
  255. class AggregatedData(Data):
  256. def __init__(self, loader, path):
  257. Data.__init__(self)
  258. self.path = path
  259. self.loader = loader
  260. self.subdirs = None
  261. self.subfiles = None
  262. def get_subdirs(self):
  263. if self.subdirs != None:
  264. return self.subdirs
  265. self.subdirs = []
  266. if self.path != None:
  267. for subdir in self.loader.db.iterate_dircontent(self.path, include_subdirs = True, include_subfiles = False):
  268. self.subdirs.append(subdir)
  269. return self.subdirs
  270. def get_subfiles(self):
  271. if self.subfiles != None:
  272. return self.subfiles
  273. self.subfiles = []
  274. if self.path != None:
  275. for subfile in self.loader.db.iterate_dircontent(self.path, include_subdirs = False, include_subfiles = True):
  276. self.subfiles.append(subfile)
  277. return self.subfiles
  278. class SelectData(Data):
  279. def __init__(self, loader, path, file_id, region_id):
  280. Data.__init__(self)
  281. self.loader = loader
  282. self.path = path
  283. self.file_id = file_id
  284. self.region_id = region_id
  285. self.region = None
  286. def get_path(self):
  287. return self.path
  288. def get_region(self):
  289. if self.region == None and self.region_id != None:
  290. row = self.loader.db.get_region(self.file_id, self.region_id)
  291. if row != None:
  292. self.region = FileRegionData(self.loader,
  293. self.file_id,
  294. self.region_id,
  295. row.name,
  296. row.begin,
  297. row.end,
  298. row.line_begin,
  299. row.line_end,
  300. row.cursor,
  301. row.group,
  302. row.checksum)
  303. return self.region
  304. class DiffData(Data):
  305. def __init__(self, new_data, old_data):
  306. Data.__init__(self)
  307. self.new_data = new_data
  308. self.old_data = old_data
  309. def get_data(self, namespace, field):
  310. new_data = self.new_data.get_data(namespace, field)
  311. old_data = self.old_data.get_data(namespace, field)
  312. if new_data == None or old_data == None:
  313. return None
  314. return new_data - old_data
  315. ####################################
  316. # Packager Interface
  317. ####################################
  318. class PackagerError(Exception):
  319. def __init__(self):
  320. Exception.__init__(self, "Failed to pack or unpack.")
  321. class PackagerFactory(object):
  322. def create(self, python_type):
  323. if python_type == None:
  324. return PackagerFactory.SkipPackager()
  325. if python_type == int:
  326. return PackagerFactory.IntPackager()
  327. if python_type == float:
  328. return PackagerFactory.FloatPackager()
  329. if python_type == str:
  330. return PackagerFactory.StringPackager()
  331. class PackagerFactoryError(Exception):
  332. def __init__(self, python_type):
  333. Exception.__init__(self, "Python type '" + str(python_type) + "' is not supported by the factory.")
  334. raise PackagerFactoryError(python_type)
  335. def get_python_type(self, sql_type):
  336. if sql_type == "integer":
  337. return int
  338. if sql_type == "real":
  339. return float
  340. if sql_type == "text":
  341. return str
  342. class PackagerFactoryError(Exception):
  343. def __init__(self, sql_type):
  344. Exception.__init__(self, "SQL type '" + str(sql_type) + "' is not supported by the factory.")
  345. raise PackagerFactoryError(sql_type)
  346. class IPackager(object):
  347. def pack(self, unpacked_data):
  348. raise core.api.InterfaceNotImplemented(self)
  349. def unpack(self, packed_data):
  350. raise core.api.InterfaceNotImplemented(self)
  351. def get_sql_type(self):
  352. raise core.api.InterfaceNotImplemented(self)
  353. def get_python_type(self):
  354. raise core.api.InterfaceNotImplemented(self)
  355. class IntPackager(IPackager):
  356. def pack(self, unpacked_data):
  357. if not isinstance(unpacked_data, int):
  358. raise PackagerError()
  359. return str(unpacked_data)
  360. def unpack(self, packed_data):
  361. try:
  362. return int(packed_data)
  363. except ValueError:
  364. raise PackagerError()
  365. def get_sql_type(self):
  366. return "integer"
  367. def get_python_type(self):
  368. return int
  369. class FloatPackager(IPackager):
  370. def pack(self, unpacked_data):
  371. if not isinstance(unpacked_data, float):
  372. raise PackagerError()
  373. return str(unpacked_data)
  374. def unpack(self, packed_data):
  375. try:
  376. return float(packed_data)
  377. except ValueError:
  378. raise PackagerError()
  379. def get_sql_type(self):
  380. return "real"
  381. def get_python_type(self):
  382. return float
  383. class StringPackager(IPackager):
  384. def pack(self, unpacked_data):
  385. if not isinstance(unpacked_data, str):
  386. raise PackagerError()
  387. return str(unpacked_data)
  388. def unpack(self, packed_data):
  389. try:
  390. return str(packed_data)
  391. except ValueError:
  392. raise PackagerError()
  393. def get_sql_type(self):
  394. return "text"
  395. def get_python_type(self):
  396. return str
  397. class SkipPackager(IPackager):
  398. def pack(self, unpacked_data):
  399. return None
  400. def unpack(self, packed_data):
  401. return None
  402. def get_sql_type(self):
  403. return None
  404. def get_python_type(self):
  405. return None
  406. ####################################
  407. # Loader
  408. ####################################
  409. class NamespaceError(Exception):
  410. def __init__(self, namespace, reason):
  411. Exception.__init__(self, "Namespace '"
  412. + namespace
  413. + "': '"
  414. + reason
  415. + "'")
  416. class FieldError(Exception):
  417. def __init__(self, field, reason):
  418. Exception.__init__(self, "Field '"
  419. + field
  420. + "': '"
  421. + reason
  422. + "'")
  423. class Namespace(object):
  424. def __init__(self, db_handle, name, support_regions = False):
  425. if not isinstance(name, str):
  426. raise NamespaceError(name, "name not a string")
  427. self.name = name
  428. self.support_regions = support_regions
  429. self.fields = {}
  430. self.db = db_handle
  431. if self.db.check_table(name) == False:
  432. self.db.create_table(name, support_regions)
  433. else:
  434. for column in self.db.iterate_columns(name):
  435. self.add_field(column.name, PackagerFactory().get_python_type(column.sql_type))
  436. def get_name(self):
  437. return self.name
  438. def are_regions_supported(self):
  439. return self.support_regions
  440. def add_field(self, field_name, python_type):
  441. if not isinstance(field_name, str):
  442. raise FieldError(field_name, "field_name not a string")
  443. packager = PackagerFactory().create(python_type)
  444. if field_name in self.fields.keys():
  445. raise FieldError(field_name, "double used")
  446. self.fields[field_name] = packager
  447. if self.db.check_column(self.get_name(), field_name) == False:
  448. self.db.create_column(self.name, field_name, packager.get_sql_type())
  449. def iterate_field_names(self):
  450. for name in self.fields.keys():
  451. yield name
  452. def get_field_packager(self, field_name):
  453. if field_name in self.fields.keys():
  454. return self.fields[field_name]
  455. else:
  456. return None
  457. def get_field_sql_type(self, field_name):
  458. return self.get_field_packager(field_name).get_sql_type()
  459. def get_field_python_type(self, field_name):
  460. return self.get_field_packager(field_name).get_python_type()
  461. class DataNotPackable(Exception):
  462. def __init__(self, namespace, field, value, packager, extra_message):
  463. Exception.__init__(self, "Data '"
  464. + str(value)
  465. + "' of type "
  466. + str(value.__class__)
  467. + " referred by '"
  468. + namespace
  469. + "=>"
  470. + field
  471. + "' is not packable by registered packager '"
  472. + str(packager.__class__)
  473. + "': " + extra_message)
  474. class Loader(object):
  475. def __init__(self):
  476. self.namespaces = {}
  477. self.db = None
  478. self.last_file_data = None # for performance boost reasons
  479. def create_database(self, dbfile, previous_db = None):
  480. self.db = core.db.sqlite.Database()
  481. if os.path.exists(dbfile):
  482. logging.warn("Removing existing file: " + dbfile)
  483. os.unlink(dbfile)
  484. if previous_db != None and os.path.exists(previous_db) == False:
  485. raise core.api.ExitError(self, "Database file '" + previous_db + "' does not exist")
  486. self.db.create(dbfile, clone_from=previous_db)
  487. def open_database(self, dbfile, read_only = True):
  488. self.db = core.db.sqlite.Database()
  489. if os.path.exists(dbfile) == False:
  490. raise core.api.ExitError(self, "Database file '" + dbfile + "' does not exist")
  491. self.db.connect(dbfile, read_only=read_only)
  492. for table in self.db.iterate_tables():
  493. self.create_namespace(table.name, table.support_regions)
  494. def create_namespace(self, name, support_regions = False):
  495. if self.db == None:
  496. return None
  497. if name in self.namespaces.keys():
  498. raise NamespaceError(name, "double used")
  499. new_namespace = Namespace(self.db, name, support_regions)
  500. self.namespaces[name] = new_namespace
  501. return new_namespace
  502. def iterate_namespace_names(self):
  503. for name in self.namespaces.keys():
  504. yield name
  505. def get_namespace(self, name):
  506. if name in self.namespaces.keys():
  507. return self.namespaces[name]
  508. else:
  509. return None
  510. def create_file_data(self, path, checksum, content):
  511. if self.db == None:
  512. return None
  513. new_id = self.db.create_file(path, checksum)
  514. result = FileData(self, path, new_id, checksum, content)
  515. self.last_file_data = result
  516. return result
  517. def load_file_data(self, path):
  518. if self.db == None:
  519. return None
  520. if self.last_file_data != None and self.last_file_data.get_path() == path:
  521. return self.last_file_data
  522. data = self.db.get_file(path)
  523. if data == None:
  524. return None
  525. result = FileData(self, data.path, data.id, data.checksum, None)
  526. self.last_file_data = result
  527. return result
  528. def save_file_data(self, file_data):
  529. if self.db == None:
  530. return None
  531. class DataIterator(object):
  532. def iterate_packed_values(self, data, namespace, support_regions = False):
  533. for each in data.iterate_fields(namespace):
  534. space = self.loader.get_namespace(namespace)
  535. if space == None:
  536. raise DataNotPackable(namespace, each[0], each[1], None, "The namespace has not been found")
  537. packager = space.get_field_packager(each[0])
  538. if packager == None:
  539. raise DataNotPackable(namespace, each[0], each[1], None, "The field has not been found")
  540. if space.support_regions != support_regions:
  541. raise DataNotPackable(namespace, each[0], each[1], packager, "Incompatible support for regions")
  542. try:
  543. packed_data = packager.pack(each[1])
  544. if packed_data == None:
  545. continue
  546. except PackagerError:
  547. raise DataNotPackable(namespace, each[0], each[1], packager, "Packager raised exception")
  548. yield (each[0], packed_data)
  549. def __init__(self, loader, data, namespace, support_regions = False):
  550. self.loader = loader
  551. self.iterator = self.iterate_packed_values(data, namespace, support_regions)
  552. def __iter__(self):
  553. return self.iterator
  554. for namespace in file_data.iterate_namespaces():
  555. if file_data.is_namespace_updated(namespace) == False:
  556. continue
  557. self.db.add_row(namespace,
  558. file_data.get_id(),
  559. None,
  560. DataIterator(self, file_data, namespace))
  561. if file_data.are_regions_loaded():
  562. for region in file_data.iterate_regions():
  563. for namespace in region.iterate_namespaces():
  564. if region.is_namespace_updated(namespace) == False:
  565. continue
  566. self.db.add_row(namespace,
  567. file_data.get_id(),
  568. region.get_id(),
  569. DataIterator(self, region, namespace, support_regions = True))
  570. def iterate_file_data(self):
  571. if self.db != None:
  572. for data in self.db.iterate_files():
  573. yield FileData(self, data.path, data.id, data.checksum, None)
  574. def load_aggregated_data(self, path = None, path_like_filter = "%", namespaces = None):
  575. if self.db == None:
  576. return None
  577. final_path_like = path_like_filter
  578. if path != None:
  579. if self.db.check_dir(path) == False and self.db.check_file(path) == False:
  580. return None
  581. final_path_like = path + path_like_filter
  582. if namespaces == None:
  583. namespaces = self.namespaces.keys()
  584. result = AggregatedData(self, path)
  585. for name in namespaces:
  586. data = self.db.aggregate_rows(name, path_like = final_path_like)
  587. for field in data.keys():
  588. result.set_data(name, field, data[field])
  589. return result
  590. def load_selected_data(self, namespace, fields = None, path = None, path_like_filter = "%", filters = []):
  591. if self.db == None:
  592. return None
  593. final_path_like = path_like_filter
  594. if path != None:
  595. if self.db.check_dir(path) == False and self.db.check_file(path) == False:
  596. return None
  597. final_path_like = path + path_like_filter
  598. namespace_obj = self.get_namespace(namespace)
  599. if namespace_obj == None:
  600. return None
  601. class SelectDataIterator(object):
  602. def iterate_selected_values(self, loader, namespace_obj, final_path_like, fields, filters):
  603. for row in loader.db.select_rows(namespace_obj.get_name(), path_like=final_path_like, filters=filters):
  604. region_id = None
  605. if namespace_obj.are_regions_supported() == True:
  606. region_id = row['region_id']
  607. data = SelectData(loader, row['path'], row['id'], region_id)
  608. field_names = fields
  609. if fields == None:
  610. field_names = namespace_obj.iterate_field_names()
  611. for field in field_names:
  612. data.set_data(namespace, field, row[field])
  613. yield data
  614. def __init__(self, loader, namespace_obj, final_path_like, fields, filters):
  615. self.iterator = self.iterate_selected_values(loader, namespace_obj, final_path_like, fields, filters)
  616. def __iter__(self):
  617. return self.iterator
  618. return SelectDataIterator(self, namespace_obj, final_path_like, fields, filters)