# sqlite.py
#
# Metrix++, Copyright 2009-2013, Metrix++ Project
# Link: http://metrixplusplus.sourceforge.net
#
# This file is a part of Metrix++ Tool.
#
# Metrix++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# Metrix++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Metrix++. If not, see <http://www.gnu.org/licenses/>.
#
  19. import sqlite3
  20. import re
  21. import os.path
  22. import logging
  23. import itertools
  24. import shutil
  25. class Database(object):
  26. last_used_id = 0
  27. version = "1.0"
  28. class TableData(object):
  29. def __init__(self, table_id, name, support_regions):
  30. self.id = table_id
  31. self.name = name
  32. self.support_regions = support_regions
  33. class ColumnData(object):
  34. def __init__(self, column_id, name, sql_type, non_zero):
  35. self.id = column_id
  36. self.name = name
  37. self.sql_type = sql_type
  38. self.non_zero = non_zero
  39. class TagData(object):
  40. def __init__(self, tag_id, name):
  41. self.id = tag_id
  42. self.name = name
  43. class FileData(object):
  44. def __init__(self, file_id, path, checksum):
  45. self.id = file_id
  46. self.path = path
  47. self.checksum = checksum
  48. class RegionData(object):
  49. def __init__(self, file_id, region_id, name, begin, end, line_begin, line_end, cursor, group, checksum):
  50. self.file_id = file_id
  51. self.region_id = region_id
  52. self.name = name
  53. self.begin = begin
  54. self.end = end
  55. self.line_begin = line_begin
  56. self.line_end = line_end
  57. self.cursor = cursor
  58. self.group = group
  59. self.checksum = checksum
  60. class MarkerData(object):
  61. def __init__(self, file_id, begin, end, group):
  62. self.file_id = file_id
  63. self.begin = begin
  64. self.end = end
  65. self.group = group
  66. def __init__(self):
  67. self.read_only = False
  68. self.conn = None
  69. self.dirs = None
  70. self.is_cloned = False
  71. self.last_used_id += 1
  72. self.id = self.last_used_id
  73. def __del__(self):
  74. if self.conn != None:
  75. if self.is_cloned == True:
  76. logging.debug("Cleaning up database file")
  77. self.InternalCleanUpUtils().clean_up_not_confirmed(self)
  78. logging.debug("Committing database file")
  79. self.conn.commit()
  80. class InternalCleanUpUtils(object):
  81. def clean_up_not_confirmed(self, db_loader):
  82. sql = "DELETE FROM __tags__ WHERE (confirmed = 0)"
  83. db_loader.log(sql)
  84. db_loader.conn.execute(sql)
  85. sql = "SELECT * FROM __tables__ WHERE (confirmed = 0)"
  86. db_loader.log(sql)
  87. for table in db_loader.conn.execute(sql).fetchall():
  88. sql = "DELETE FROM __columns__ WHERE table_id = '" + str(table['id']) + "'"
  89. db_loader.log(sql)
  90. db_loader.conn.execute(sql)
  91. sql = "DELETE FROM __tables__ WHERE id = '" + str(table['id']) + "'"
  92. db_loader.log(sql)
  93. db_loader.conn.execute(sql)
  94. sql = "DROP TABLE '" + table['name'] + "'"
  95. db_loader.log(sql)
  96. db_loader.conn.execute(sql)
  97. sql = "SELECT __columns__.name AS column_name, __tables__.name AS table_name, __columns__.id AS column_id FROM __columns__, __tables__ WHERE (__columns__.confirmed = 0 AND __columns__.table_id = __tables__.id)"
  98. db_loader.log(sql)
  99. for column in db_loader.conn.execute(sql).fetchall():
  100. logging.warn("New database file inherits useless column: '" + column['table_name'] + "'.'" + column['column_name'] + "'")
  101. sql = "DELETE FROM __columns__ WHERE id = '" + str(column['column_id']) + "'"
  102. db_loader.log(sql)
  103. db_loader.conn.execute(sql)
  104. sql = "UPDATE '" + column['table_name'] + "' SET '" + column['column_name'] + "' = NULL"
  105. db_loader.log(sql)
  106. db_loader.conn.execute(sql)
  107. self.clean_up_file(db_loader)
  108. def clean_up_file(self, db_loader, file_id = None):
  109. sql = "SELECT * FROM __tables__"
  110. db_loader.log(sql)
  111. for table in itertools.chain(db_loader.conn.execute(sql).fetchall(), [{'name':'__regions__'}, {'name':'__markers__'}]):
  112. sql = ""
  113. if file_id == None:
  114. sql = "DELETE FROM '" + table['name'] + "' WHERE file_id IN (SELECT __files__.id FROM __files__ WHERE __files__.confirmed = 0)"
  115. else:
  116. sql = "DELETE FROM '" + table['name'] + "' WHERE (file_id = " + str(file_id) + ")"
  117. db_loader.log(sql)
  118. db_loader.conn.execute(sql)
  119. class InternalPathUtils(object):
  120. def iterate_heads(self, path):
  121. dirs = []
  122. head = os.path.dirname(path)
  123. last_head = None # to process Windows drives
  124. while (head != "" and last_head != head):
  125. dirs.append(os.path.basename(head))
  126. last_head = head
  127. head = os.path.dirname(head)
  128. dirs.reverse()
  129. for each in dirs:
  130. yield each
  131. def normalize_path(self, path):
  132. if path == None:
  133. return None
  134. return re.sub(r'''[\\]''', "/", path)
  135. def update_dirs(self, db_loader, path = None):
  136. if db_loader.dirs == None:
  137. if path == None:
  138. db_loader.dirs = {} # initial construction
  139. else:
  140. return # avoid useless cache updates
  141. elif path == None:
  142. return # avoid multiple initial constructions
  143. path = self.normalize_path(path)
  144. rows = None
  145. if path == None:
  146. sql = "SELECT * FROM __files__"
  147. db_loader.log(sql)
  148. rows = db_loader.conn.execute(sql).fetchall()
  149. else:
  150. rows = [{"path": path}]
  151. for row in rows:
  152. cur_head = db_loader.dirs
  153. for dir_name in self.iterate_heads(row["path"]):
  154. if dir_name not in cur_head.keys():
  155. cur_head[dir_name] = {}
  156. cur_head = cur_head[dir_name]
  157. cur_head[os.path.basename(row["path"])] = None
  158. def create(self, file_name, clone_from = None):
  159. if clone_from != None:
  160. self.is_cloned = True
  161. logging.debug("Cloning database file: " + clone_from)
  162. shutil.copy2(clone_from, file_name)
  163. logging.debug("Connecting database file: " + file_name)
  164. self.conn = sqlite3.connect(file_name)
  165. self.conn.row_factory = sqlite3.Row
  166. self.read_only = False
  167. sql = "UPDATE __tables__ SET confirmed = 0"
  168. self.log(sql)
  169. self.conn.execute(sql)
  170. sql = "UPDATE __columns__ SET confirmed = 0"
  171. self.log(sql)
  172. self.conn.execute(sql)
  173. sql = "UPDATE __tags__ SET confirmed = 0"
  174. self.log(sql)
  175. self.conn.execute(sql)
  176. sql = "UPDATE __files__ SET confirmed = 0"
  177. self.log(sql)
  178. self.conn.execute(sql)
  179. else:
  180. self.connect(file_name)
  181. def connect(self, file_name, read_only = False):
  182. logging.debug("Connecting database file: " + file_name)
  183. self.conn = sqlite3.connect(file_name)
  184. self.conn.row_factory = sqlite3.Row
  185. self.read_only = read_only
  186. if self.read_only == False:
  187. try:
  188. sql = "CREATE TABLE __info__ (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, property text NOT NULL, value text, UNIQUE (property) ON CONFLICT REPLACE)"
  189. self.log(sql)
  190. self.conn.execute(sql)
  191. sql = "INSERT INTO __info__ (property, value) VALUES ('version', '" + self.version + "')"
  192. self.log(sql)
  193. self.conn.execute(sql)
  194. sql = "CREATE TABLE __tables__ (id integer NOT NULL PRIMARY KEY, name text NOT NULL, support_regions integer NOT NULL, confirmed integer NOT NULL, UNIQUE (name))"
  195. self.log(sql)
  196. self.conn.execute(sql)
  197. sql = "CREATE TABLE __columns__ (id integer NOT NULL PRIMARY KEY, name text NOT NULL, type text NOT NULL, table_id integer NOT_NULL, non_zero integer NOT NULL, confirmed integer NOT NULL, UNIQUE (name, table_id))"
  198. self.log(sql)
  199. self.conn.execute(sql)
  200. sql = "CREATE TABLE __tags__ (id integer NOT NULL PRIMARY KEY, name text NOT NULL UNIQUE, confirmed integer NOT NULL)"
  201. self.log(sql)
  202. self.conn.execute(sql)
  203. sql = "CREATE TABLE __files__ (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, path text NOT NULL, checksum integer NOT NULL, tag1 integer, tag2 integer, tag3 integer, confirmed integer NOT NULL, UNIQUE(path))"
  204. self.log(sql)
  205. self.conn.execute(sql)
  206. sql = "CREATE TABLE __regions__ (file_id integer NOT NULL, region_id integer NOT NULL, name text NOT NULL, begin integer NOT NULL, end integer NOT NULL, line_begin integer NOT NULL, line_end integer NOT NULL, cursor integer NOT NULL, group_id integer NOT NULL, checksum integer NOT NULL, PRIMARY KEY (file_id, region_id))"
  207. self.log(sql)
  208. self.conn.execute(sql)
  209. sql = "CREATE TABLE __markers__ (id integer NOT NULL PRIMARY KEY, file_id integer NOT NULL, begin integer NOT NULL, end integer NOT NULL, group_id integer NOT NULL)"
  210. self.log(sql)
  211. self.conn.execute(sql)
  212. except sqlite3.OperationalError as e:
  213. logging.debug("sqlite3.OperationalError: " + str(e))
  214. def create_table(self, table_name, support_regions = False):
  215. assert(self.read_only == False)
  216. sql = "SELECT * FROM __tables__ WHERE (name = '" + table_name + "'AND confirmed == 0)"
  217. self.log(sql)
  218. result = self.conn.execute(sql).fetchall()
  219. if len(result) != 0:
  220. sql = "UPDATE __tables__ SET confirmed = 1 WHERE (name = '" + table_name + "')"
  221. self.log(sql)
  222. self.conn.execute(sql)
  223. return
  224. sql = "CREATE TABLE '" + table_name + "' (file_id integer NOT NULL PRIMARY KEY)"
  225. if support_regions == True:
  226. sql = str("CREATE TABLE '" + table_name + "' (file_id integer NOT NULL, region_id integer NOT NULL, "
  227. + "PRIMARY KEY (file_id, region_id))")
  228. self.log(sql)
  229. self.conn.execute(sql)
  230. sql = "INSERT INTO __tables__ (name, support_regions, confirmed) VALUES ('" + table_name + "', '" + str(int(support_regions)) + "', 1)"
  231. self.log(sql)
  232. self.conn.execute(sql)
  233. def iterate_tables(self):
  234. sql = "SELECT * FROM __tables__ WHERE (confirmed = 1)"
  235. self.log(sql)
  236. result = self.conn.execute(sql).fetchall()
  237. for row in result:
  238. yield self.TableData(int(row["id"]), str(row["name"]), bool(row["support_regions"]))
  239. def check_table(self, table_name):
  240. sql = "SELECT * FROM __tables__ WHERE (name = '" + table_name + "' AND confirmed = 1)"
  241. self.log(sql)
  242. result = self.conn.execute(sql).fetchall()
  243. if len(result) == 0:
  244. return False
  245. return True
  246. def create_column(self, table_name, column_name, column_type, non_zero=False):
  247. assert(self.read_only == False)
  248. if column_type == None:
  249. logging.debug("Skipping column '" + column_name + "' creation for table '" + table_name + "'")
  250. return
  251. sql = "SELECT id FROM __tables__ WHERE (name = '" + table_name + "')"
  252. self.log(sql)
  253. table_id = self.conn.execute(sql).next()['id']
  254. sql = "SELECT * FROM __columns__ WHERE (table_id = '" + str(table_id) + "' AND name = '" + column_name + "' AND confirmed == 0)"
  255. self.log(sql)
  256. result = self.conn.execute(sql).fetchall()
  257. if len(result) != 0:
  258. sql = "UPDATE __columns__ SET confirmed = 1 WHERE (table_id = '" + str(table_id) + "' AND name = '" + column_name + "')"
  259. self.log(sql)
  260. self.conn.execute(sql)
  261. return
  262. sql = "ALTER TABLE '" + table_name + "' ADD COLUMN '" + column_name + "' " + column_type
  263. self.log(sql)
  264. self.conn.execute(sql)
  265. sql = "SELECT id FROM __tables__ WHERE (name = '" + table_name + "')"
  266. self.log(sql)
  267. table_id = self.conn.execute(sql).next()['id']
  268. sql = "INSERT INTO __columns__ (name, type, table_id, non_zero, confirmed) VALUES ('" + column_name + "', '" + column_type + "', '" + str(table_id) + "', '" + str(int(non_zero)) + "', 1)"
  269. self.log(sql)
  270. self.conn.execute(sql)
  271. def iterate_columns(self, table_name):
  272. sql = "SELECT id FROM __tables__ WHERE (name = '" + table_name + "')"
  273. self.log(sql)
  274. table_id = self.conn.execute(sql).next()['id']
  275. sql = "SELECT * FROM __columns__ WHERE (table_id = '" + str(table_id) + "' AND confirmed = 1)"
  276. self.log(sql)
  277. result = self.conn.execute(sql).fetchall()
  278. for row in result:
  279. yield self.ColumnData(int(row["id"]), str(row["name"]), str(row["type"]), bool(row["non_zero"]))
  280. def check_column(self, table_name, column_name):
  281. sql = "SELECT id FROM __tables__ WHERE (name = '" + table_name + "')"
  282. self.log(sql)
  283. table_id = self.conn.execute(sql).next()['id']
  284. sql = "SELECT * FROM __columns__ WHERE (table_id = '" + str(table_id) + "' AND name = '" + column_name + "' AND confirmed = 1)"
  285. self.log(sql)
  286. result = self.conn.execute(sql).fetchall()
  287. if len(result) == 0:
  288. return False
  289. return True
  290. def create_tag(self, tag_name):
  291. assert(self.read_only == False)
  292. sql = "SELECT * FROM __tags__ WHERE (name = '" + tag_name + "' AND confirmed == 0)"
  293. self.log(sql)
  294. result = self.conn.execute(sql).fetchall()
  295. if len(result) != 0:
  296. sql = "UPDATE __tags__ SET confirmed = 1 WHERE (name = '" + tag_name + "')"
  297. self.log(sql)
  298. self.conn.execute(sql)
  299. return
  300. sql = "INSERT INTO __tags__ (name, confirmed) VALUES ('" + tag_name + "', 1)"
  301. self.log(sql)
  302. self.conn.execute(sql)
  303. def iterate_tags(self):
  304. sql = "SELECT * FROM __tags__ WHERE (confirmed = 1)"
  305. self.log(sql)
  306. result = self.conn.execute(sql).fetchall()
  307. for row in result:
  308. yield self.TagData(int(row["id"]), str(row["name"]))
  309. def check_tag(self, tag_name):
  310. sql = "SELECT * FROM __tags__ WHERE (name = '" + tag_name + "' AND confirmed = 1)"
  311. self.log(sql)
  312. result = self.conn.execute(sql).fetchall()
  313. if len(result) == 0:
  314. return False
  315. return True
  316. def create_file(self, path, checksum):
  317. assert(self.read_only == False)
  318. path = self.InternalPathUtils().normalize_path(path)
  319. if self.is_cloned == True:
  320. sql = "SELECT * FROM __files__ WHERE (path = '" + path + "')"
  321. self.log(sql)
  322. result = self.conn.execute(sql).fetchall()
  323. if len(result) != 0:
  324. if result[0]['checksum'] == checksum:
  325. old_id = result[0]['id']
  326. sql = "UPDATE __files__ SET confirmed = 1 WHERE (id = " + str(old_id) +")"
  327. self.log(sql)
  328. self.conn.execute(sql)
  329. return old_id
  330. else:
  331. self.InternalCleanUpUtils().clean_up_file(self, result[0]['id'])
  332. sql = "INSERT OR REPLACE INTO __files__ (path, checksum, confirmed) VALUES (?, ?, 1)"
  333. column_data = [path, checksum]
  334. self.log(sql + " /with arguments: " + str(column_data))
  335. cur = self.conn.cursor()
  336. cur.execute(sql, column_data)
  337. self.InternalPathUtils().update_dirs(self, path=path)
  338. return cur.lastrowid
  339. def iterate_dircontent(self, path, include_subdirs = True, include_subfiles = True):
  340. self.InternalPathUtils().update_dirs(self)
  341. path = self.InternalPathUtils().normalize_path(path)
  342. cur_head = self.dirs
  343. valid = True
  344. if path != "":
  345. for head in self.InternalPathUtils().iterate_heads(path):
  346. if head not in cur_head.keys():
  347. # non existing directory
  348. valid = False
  349. else:
  350. cur_head = cur_head[head]
  351. basename = os.path.basename(path)
  352. if basename not in cur_head.keys() or cur_head[basename] == None:
  353. # do not exist or points to the file
  354. valid = False
  355. else:
  356. cur_head = cur_head[basename]
  357. if valid == True:
  358. for elem in cur_head.keys():
  359. if include_subdirs == True and cur_head[elem] != None:
  360. yield elem
  361. if include_subfiles == True and cur_head[elem] == None:
  362. yield elem
  363. def check_file(self, path):
  364. return self.get_file(path) != None
  365. def check_dir(self, path):
  366. for each in self.iterate_dircontent(path):
  367. each = each # used
  368. return True # there is at least one item
  369. return False
  370. def get_file(self, path):
  371. path = self.InternalPathUtils().normalize_path(path)
  372. result = self.select_rows("__files__", filters = [("path", "=", path), ("confirmed", "=", 1)])
  373. if len(result) == 0:
  374. return None
  375. assert(len(result) == 1)
  376. return self.FileData(result[0]['id'], result[0]['path'], result[0]['checksum'])
  377. def iterate_files(self):
  378. sql = "SELECT * FROM __files__ WHERE (confirmed = 1)"
  379. self.log(sql)
  380. for row in self.conn.execute(sql).fetchall():
  381. yield self.FileData(row['id'], row['path'], row['checksum'])
  382. def create_region(self, file_id, region_id, name, begin, end, line_begin, line_end, cursor, group, checksum):
  383. assert(self.read_only == False)
  384. sql = "INSERT OR REPLACE INTO __regions__ (file_id, region_id, name, begin, end, line_begin, line_end, cursor, group_id, checksum) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
  385. column_data = [file_id, region_id, name, begin, end, line_begin, line_end, cursor, group, checksum]
  386. self.log(sql + " /with arguments: " + str(column_data))
  387. cur = self.conn.cursor()
  388. cur.execute(sql, column_data)
  389. return cur.lastrowid
  390. def get_region(self, file_id, region_id):
  391. result = self.select_rows("__regions__", filters = [("file_id", "=", file_id), ("region_id", "=", region_id)])
  392. if len(result) == 0:
  393. return None
  394. return self.RegionData(result[0]['file_id'],
  395. result[0]['region_id'],
  396. result[0]['name'],
  397. result[0]['begin'],
  398. result[0]['end'],
  399. result[0]['line_begin'],
  400. result[0]['line_end'],
  401. result[0]['cursor'],
  402. result[0]['group_id'],
  403. result[0]['checksum'])
  404. def iterate_regions(self, file_id):
  405. for each in self.select_rows("__regions__", filters = [("file_id", "=", file_id)]):
  406. yield self.RegionData(each['file_id'],
  407. each['region_id'],
  408. each['name'],
  409. each['begin'],
  410. each['end'],
  411. each['line_begin'],
  412. each['line_end'],
  413. each['cursor'],
  414. each['group_id'],
  415. each['checksum'])
  416. def create_marker(self, file_id, begin, end, group):
  417. assert(self.read_only == False)
  418. sql = "INSERT OR REPLACE INTO __markers__ (file_id, begin, end, group_id) VALUES (?, ?, ?, ?)"
  419. column_data = [file_id, begin, end, group]
  420. self.log(sql + " /with arguments: " + str(column_data))
  421. cur = self.conn.cursor()
  422. cur.execute(sql, column_data)
  423. return cur.lastrowid
  424. def iterate_markers(self, file_id):
  425. for each in self.select_rows("__markers__", filters = [("file_id", "=", file_id)]):
  426. yield self.MarkerData(each['file_id'],
  427. each['name'],
  428. each['begin'],
  429. each['group_id'])
  430. def add_row(self, table_name, file_id, region_id, array_data):
  431. assert(self.read_only == False)
  432. column_names = "'file_id'"
  433. column_values = "?"
  434. column_data = [file_id]
  435. if region_id != None:
  436. column_names += ", 'region_id'"
  437. column_values += ", ?"
  438. column_data.append(region_id)
  439. useful_data = 0
  440. for each in array_data:
  441. column_names += ", '" + each[0] + "'"
  442. column_values += ", ?"
  443. column_data.append(each[1])
  444. useful_data += 1
  445. if useful_data == 0:
  446. return
  447. sql = "INSERT OR REPLACE INTO '" + table_name + "' (" + column_names + ") VALUES (" + column_values + ")"
  448. self.log(sql + " /with arguments: " + str(column_data))
  449. cur = self.conn.cursor()
  450. cur.execute(sql, column_data)
  451. return cur.lastrowid
  452. def select_rows(self, table_name, path_like = None, column_names = [], filters = []):
  453. safe_column_names = []
  454. for each in column_names:
  455. safe_column_names.append("'" + each + "'")
  456. return self.select_rows_unsafe(table_name, path_like = path_like, column_names = safe_column_names, filters = filters)
  457. def select_rows_unsafe(self, table_name, path_like = None, column_names = [], filters = []):
  458. path_like = self.InternalPathUtils().normalize_path(path_like)
  459. if self.conn == None:
  460. return []
  461. table_stmt = "'" + table_name + "'"
  462. what_stmt = ", ".join(column_names)
  463. if len(what_stmt) == 0:
  464. what_stmt = "*"
  465. elif path_like != None:
  466. what_stmt += ", '__files__'.'path', '__files__'.'id'"
  467. inner_stmt = ""
  468. if path_like != None:
  469. inner_stmt = " INNER JOIN '__files__' ON '__files__'.'id' = '" + table_name + "'.'file_id' "
  470. where_stmt = " "
  471. values = ()
  472. if len(filters) != 0:
  473. if filters[0][1] == 'IN':
  474. where_stmt = " WHERE (`" + filters[0][0] + "` " + filters[0][1] + " " + filters[0][2]
  475. else:
  476. where_stmt = " WHERE (`" + filters[0][0] + "` " + filters[0][1] + " ?"
  477. values = (filters[0][2],)
  478. for each in filters[1:]:
  479. if each[1] == 'IN':
  480. where_stmt += " AND `" + each[0] + "` " + each[1] + " " + each[2]
  481. else:
  482. where_stmt += " AND `" + each[0] + "` " + each[1] + " ?"
  483. values += (each[2], )
  484. if path_like != None:
  485. where_stmt += " AND '__files__'.'path' LIKE ?"
  486. values += (path_like, )
  487. where_stmt += ")"
  488. else:
  489. where_stmt = " WHERE '__files__'.'path' LIKE ?"
  490. values += (path_like, )
  491. sql = "SELECT " + what_stmt + " FROM " + table_stmt + inner_stmt + where_stmt
  492. self.log(sql + " /with arguments: " + str(values))
  493. return self.conn.execute(sql, values).fetchall()
  494. def get_row(self, table_name, file_id, region_id):
  495. selected = self.get_rows(table_name, file_id, region_id)
  496. # assures that only one row in database
  497. # if assertion happens, caller's intention is not right, use get_rows instead
  498. assert(len(selected) == 0 or len(selected) == 1)
  499. if len(selected) == 0:
  500. return None
  501. return selected[0]
  502. def get_rows(self, table_name, file_id, region_id):
  503. filters = [("file_id", '=', file_id)]
  504. if region_id != None:
  505. filters.append(("region_id", '=', region_id))
  506. return self.select_rows(table_name, filters=filters)
  507. def aggregate_rows(self, table_name, path_like = None, column_names = None, filters = []):
  508. if column_names == None:
  509. column_names = []
  510. for column in self.iterate_columns(table_name):
  511. column_names.append(column.name)
  512. if len(column_names) == 0:
  513. # it is possible that a table does not have meanfull columns
  514. return {}
  515. total_column_names = []
  516. for column_name in column_names:
  517. for func in ['max', 'min', 'avg', 'total']:
  518. total_column_names.append(func + "('" + table_name + "'.'" + column_name + "') AS " + "'" + column_name + "_" + func + "'")
  519. data = self.select_rows_unsafe(table_name, path_like = path_like, column_names = total_column_names, filters = filters)
  520. assert(len(data) == 1)
  521. result = {}
  522. for column_name in column_names:
  523. result[column_name] = {}
  524. for func in ['max', 'min', 'avg', 'total']:
  525. result[column_name][func] = data[0][column_name + "_" + func]
  526. return result
  527. def log(self, sql):
  528. #import traceback
  529. #traceback.print_stack()
  530. logging.debug("[" + str(self.id) + "] Executing query: " + sql)