sqlite.py 25 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556
  1. #
  2. # Metrix++, Copyright 2009-2013, Metrix++ Project
  3. # Link: http://swi.sourceforge.net
  4. #
  5. # This file is a part of Metrix++ Tool.
  6. #
  7. # Metrix++ is free software: you can redistribute it and/or modify
  8. # it under the terms of the GNU General Public License as published by
  9. # the Free Software Foundation, version 3 of the License.
  10. #
  11. # Metrix++ is distributed in the hope that it will be useful,
  12. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  13. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  14. # GNU General Public License for more details.
  15. #
  16. # You should have received a copy of the GNU General Public License
  17. # along with Metrix++. If not, see <http://www.gnu.org/licenses/>.
  18. #
  19. import sqlite3
  20. import re
  21. import os.path
  22. import logging
  23. import itertools
  24. import shutil
class Database(object):
    """sqlite3-backed storage used by the Metrix++ tool."""

    # Class-level counter used to derive an instance id for log prefixes.
    # NOTE(review): __init__ does `self.last_used_id += 1`, which creates an
    # *instance* attribute from this class value, so the class counter itself
    # never advances and every instance ends up with id == 1.
    last_used_id = 0
    # Schema version stamped into the __info__ table on creation.
    version = "1.0"

    class TableData(object):
        """Value object for one row of __tables__ (a registered data table)."""
        def __init__(self, table_id, name, support_regions):
            self.id = table_id
            self.name = name
            # True when the table keys on (file_id, region_id) pairs
            self.support_regions = support_regions

    class ColumnData(object):
        """Value object for one row of __columns__."""
        def __init__(self, column_id, name, sql_type):
            self.id = column_id
            self.name = name
            self.sql_type = sql_type

    class FileData(object):
        """Value object for one row of __files__."""
        def __init__(self, file_id, path, checksum):
            self.id = file_id
            self.path = path
            self.checksum = checksum

    class RegionData(object):
        """Value object for one row of __regions__.

        NOTE(review): begin/end look like character offsets and
        line_begin/line_end like line numbers — confirm against the parser.
        """
        def __init__(self, file_id, region_id, name, begin, end, line_begin, line_end, cursor, group, checksum):
            self.file_id = file_id
            self.region_id = region_id
            self.name = name
            self.begin = begin
            self.end = end
            self.line_begin = line_begin
            self.line_end = line_end
            self.cursor = cursor
            self.group = group
            self.checksum = checksum

    class MarkerData(object):
        """Value object for one row of __markers__."""
        def __init__(self, file_id, begin, end, group):
            self.file_id = file_id
            self.begin = begin
            self.end = end
            self.group = group
  61. def __init__(self):
  62. self.read_only = False
  63. self.conn = None
  64. self.dirs = None
  65. self.is_cloned = False
  66. self.last_used_id += 1
  67. self.id = self.last_used_id
  68. def __del__(self):
  69. if self.conn != None:
  70. if self.is_cloned == True:
  71. logging.debug("Cleaning up database file")
  72. self.InternalCleanUpUtils().clean_up_not_confirmed(self)
  73. logging.debug("Committing database file")
  74. self.conn.commit()
    class InternalCleanUpUtils(object):
        """Purges unconfirmed (stale) schema entries and file data.

        After cloning a database every table/column/file starts with
        confirmed = 0; entries touched by the new run are re-confirmed and
        whatever is left is removed here before commit.
        """

        def clean_up_not_confirmed(self, db_loader):
            # Drop data tables that were never re-confirmed, together with
            # their registration rows in __columns__/__tables__.
            # NOTE(review): SQL is assembled by string concatenation; the
            # values come from this database itself, but parameterized
            # queries would still be safer.
            sql = "SELECT * FROM __tables__ WHERE (confirmed = 0)"
            db_loader.log(sql)
            for table in db_loader.conn.execute(sql).fetchall():
                sql = "DELETE FROM __columns__ WHERE table_id = '" + str(table['id']) + "'"
                db_loader.log(sql)
                db_loader.conn.execute(sql)
                sql = "DELETE FROM __tables__ WHERE id = '" + str(table['id']) + "'"
                db_loader.log(sql)
                db_loader.conn.execute(sql)
                sql = "DROP TABLE '" + table['name'] + "'"
                db_loader.log(sql)
                db_loader.conn.execute(sql)
            # For surviving tables, blank out unconfirmed columns and forget
            # their registration.
            sql = "SELECT __columns__.name AS column_name, __tables__.name AS table_name, __columns__.id AS column_id FROM __columns__, __tables__ WHERE (__columns__.confirmed = 0 AND __columns__.table_id = __tables__.id)"
            db_loader.log(sql)
            for column in db_loader.conn.execute(sql).fetchall():
                # NOTE(review): logging.warn is a deprecated alias of
                # logging.warning.
                logging.warn("New database file inherits useless column: '" + column['table_name'] + "'.'" + column['column_name'] + "'")
                sql = "DELETE FROM __columns__ WHERE id = '" + str(column['column_id']) + "'"
                db_loader.log(sql)
                db_loader.conn.execute(sql)
                sql = "UPDATE '" + column['table_name'] + "' SET '" + column['column_name'] + "' = NULL"
                db_loader.log(sql)
                db_loader.conn.execute(sql)
            self.clean_up_file(db_loader)

        def clean_up_file(self, db_loader, file_id = None):
            """Delete per-file rows from every data table plus the built-in
            __regions__/__markers__ tables.

            With file_id == None rows of all unconfirmed files are removed;
            otherwise only rows of the given file.
            """
            sql = "SELECT * FROM __tables__"
            db_loader.log(sql)
            # __regions__/__markers__ are not listed in __tables__, so they
            # are chained in explicitly as pseudo-rows.
            for table in itertools.chain(db_loader.conn.execute(sql).fetchall(), [{'name':'__regions__'}, {'name':'__markers__'}]):
                sql = ""
                if file_id == None:
                    sql = "DELETE FROM '" + table['name'] + "' WHERE file_id IN (SELECT __files__.id FROM __files__ WHERE __files__.confirmed = 0)"
                else:
                    sql = "DELETE FROM '" + table['name'] + "' WHERE (file_id = " + str(file_id) + ")"
                db_loader.log(sql)
                db_loader.conn.execute(sql)
  111. class InternalPathUtils(object):
  112. def iterate_heads(self, path):
  113. dirs = []
  114. head = os.path.dirname(path)
  115. last_head = None # to process Windows drives
  116. while (head != "" and last_head != head):
  117. dirs.append(os.path.basename(head))
  118. last_head = head
  119. head = os.path.dirname(head)
  120. dirs.reverse()
  121. for each in dirs:
  122. yield each
  123. def normalize_path(self, path):
  124. if path == None:
  125. return None
  126. return re.sub(r'''[\\]''', "/", path)
  127. def update_dirs(self, db_loader, path = None):
  128. if db_loader.dirs == None:
  129. if path == None:
  130. db_loader.dirs = {} # initial construction
  131. else:
  132. return # avoid useless cache updates
  133. elif path == None:
  134. return # avoid multiple initial constructions
  135. path = self.normalize_path(path)
  136. rows = None
  137. if path == None:
  138. sql = "SELECT * FROM __files__"
  139. db_loader.log(sql)
  140. rows = db_loader.conn.execute(sql).fetchall()
  141. else:
  142. rows = [{"path": path}]
  143. for row in rows:
  144. cur_head = db_loader.dirs
  145. for dir_name in self.iterate_heads(row["path"]):
  146. if dir_name not in cur_head.keys():
  147. cur_head[dir_name] = {}
  148. cur_head = cur_head[dir_name]
  149. cur_head[os.path.basename(row["path"])] = None
  150. def create(self, file_name, clone_from = None):
  151. if clone_from != None:
  152. self.is_cloned = True
  153. logging.debug("Cloning database file: " + clone_from)
  154. shutil.copy2(clone_from, file_name)
  155. logging.debug("Connecting database file: " + file_name)
  156. self.conn = sqlite3.connect(file_name)
  157. self.conn.row_factory = sqlite3.Row
  158. self.read_only = False
  159. sql = "UPDATE __tables__ SET confirmed = 0"
  160. self.log(sql)
  161. self.conn.execute(sql)
  162. sql = "UPDATE __columns__ SET confirmed = 0"
  163. self.log(sql)
  164. self.conn.execute(sql)
  165. sql = "UPDATE __files__ SET confirmed = 0"
  166. self.log(sql)
  167. self.conn.execute(sql)
  168. else:
  169. self.connect(file_name)
  170. def connect(self, file_name, read_only = False):
  171. logging.debug("Connecting database file: " + file_name)
  172. self.conn = sqlite3.connect(file_name)
  173. self.conn.row_factory = sqlite3.Row
  174. self.read_only = read_only
  175. if self.read_only == False:
  176. try:
  177. sql = "CREATE TABLE __info__ (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, property text NOT NULL, value text, UNIQUE (property) ON CONFLICT REPLACE)"
  178. self.log(sql)
  179. self.conn.execute(sql)
  180. sql = "INSERT INTO __info__ (property, value) VALUES ('version', '" + self.version + "')"
  181. self.log(sql)
  182. self.conn.execute(sql)
  183. sql = "CREATE TABLE __tables__ (id integer NOT NULL PRIMARY KEY, name text NOT NULL, support_regions integer NOT NULL, confirmed integer NOT NULL, UNIQUE (name))"
  184. self.log(sql)
  185. self.conn.execute(sql)
  186. sql = "CREATE TABLE __columns__ (id integer NOT NULL PRIMARY KEY, name text NOT NULL, type text NOT NULL, table_id integer NOT_NULL, confirmed integer NOT NULL, UNIQUE (name, table_id))"
  187. self.log(sql)
  188. self.conn.execute(sql)
  189. sql = "CREATE TABLE __files__ (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, path text NOT NULL, checksum integer NOT NULL, confirmed integer NOT NULL, UNIQUE(path))"
  190. self.log(sql)
  191. self.conn.execute(sql)
  192. sql = "CREATE TABLE __regions__ (file_id integer NOT NULL, region_id integer NOT NULL, name text NOT NULL, begin integer NOT NULL, end integer NOT NULL, line_begin integer NOT NULL, line_end integer NOT NULL, cursor integer NOT NULL, group_id integer NOT NULL, checksum integer NOT NULL, PRIMARY KEY (file_id, region_id))"
  193. self.log(sql)
  194. self.conn.execute(sql)
  195. sql = "CREATE TABLE __markers__ (id integer NOT NULL PRIMARY KEY, file_id integer NOT NULL, begin integer NOT NULL, end integer NOT NULL, group_id integer NOT NULL)"
  196. self.log(sql)
  197. self.conn.execute(sql)
  198. except sqlite3.OperationalError as e:
  199. logging.debug("sqlite3.OperationalError: " + str(e))
  200. def create_table(self, table_name, support_regions = False):
  201. assert(self.read_only == False)
  202. sql = "SELECT * FROM __tables__ WHERE (name = '" + table_name + "'AND confirmed == 0)"
  203. self.log(sql)
  204. result = self.conn.execute(sql).fetchall()
  205. if len(result) != 0:
  206. sql = "UPDATE __tables__ SET confirmed = 1 WHERE (name = '" + table_name + "')"
  207. self.log(sql)
  208. self.conn.execute(sql)
  209. return
  210. sql = "CREATE TABLE '" + table_name + "' (file_id integer NOT NULL PRIMARY KEY)"
  211. if support_regions == True:
  212. sql = str("CREATE TABLE '" + table_name + "' (file_id integer NOT NULL, region_id integer NOT NULL, "
  213. + "PRIMARY KEY (file_id, region_id))")
  214. self.log(sql)
  215. self.conn.execute(sql)
  216. sql = "INSERT INTO __tables__ (name, support_regions, confirmed) VALUES ('" + table_name + "', '" + str(int(support_regions)) + "', 1)"
  217. self.log(sql)
  218. self.conn.execute(sql)
  219. def iterate_tables(self):
  220. sql = "SELECT * FROM __tables__ WHERE (confirmed = 1)"
  221. self.log(sql)
  222. result = self.conn.execute(sql).fetchall()
  223. for row in result:
  224. yield self.TableData(int(row["id"]), str(row["name"]), bool(row["support_regions"]))
  225. def check_table(self, table_name):
  226. sql = "SELECT * FROM __tables__ WHERE (name = '" + table_name + "' AND confirmed = 1)"
  227. self.log(sql)
  228. result = self.conn.execute(sql).fetchall()
  229. if len(result) == 0:
  230. return False
  231. return True
  232. def create_column(self, table_name, column_name, column_type):
  233. assert(self.read_only == False)
  234. if column_type == None:
  235. logging.debug("Skipping column '" + column_name + "' creation for table '" + table_name + "'")
  236. return
  237. sql = "SELECT id FROM __tables__ WHERE (name = '" + table_name + "')"
  238. self.log(sql)
  239. table_id = self.conn.execute(sql).next()['id']
  240. sql = "SELECT * FROM __columns__ WHERE (table_id = '" + str(table_id) + "' AND name = '" + column_name + "' AND confirmed == 0)"
  241. self.log(sql)
  242. result = self.conn.execute(sql).fetchall()
  243. if len(result) != 0:
  244. sql = "UPDATE __columns__ SET confirmed = 1 WHERE (table_id = '" + str(table_id) + "' AND name = '" + column_name + "')"
  245. self.log(sql)
  246. self.conn.execute(sql)
  247. return
  248. sql = "ALTER TABLE '" + table_name + "' ADD COLUMN '" + column_name + "' " + column_type
  249. self.log(sql)
  250. self.conn.execute(sql)
  251. sql = "SELECT id FROM __tables__ WHERE (name = '" + table_name + "')"
  252. self.log(sql)
  253. table_id = self.conn.execute(sql).next()['id']
  254. sql = "INSERT INTO __columns__ (name, type, table_id, confirmed) VALUES ('" + column_name + "', '" + column_type + "', '" + str(table_id) + "', 1)"
  255. self.log(sql)
  256. self.conn.execute(sql)
  257. def iterate_columns(self, table_name):
  258. sql = "SELECT id FROM __tables__ WHERE (name = '" + table_name + "')"
  259. self.log(sql)
  260. table_id = self.conn.execute(sql).next()['id']
  261. sql = "SELECT * FROM __columns__ WHERE (table_id = '" + str(table_id) + "' AND confirmed = 1)"
  262. self.log(sql)
  263. result = self.conn.execute(sql).fetchall()
  264. for row in result:
  265. yield self.ColumnData(int(row["id"]), str(row["name"]), str(row["type"]))
  266. def check_column(self, table_name, column_name):
  267. sql = "SELECT id FROM __tables__ WHERE (name = '" + table_name + "')"
  268. self.log(sql)
  269. table_id = self.conn.execute(sql).next()['id']
  270. sql = "SELECT * FROM __columns__ WHERE (table_id = '" + str(table_id) + "' AND name = '" + column_name + "' AND confirmed = 1)"
  271. self.log(sql)
  272. result = self.conn.execute(sql).fetchall()
  273. if len(result) == 0:
  274. return False
  275. return True
  276. def create_file(self, path, checksum):
  277. assert(self.read_only == False)
  278. path = self.InternalPathUtils().normalize_path(path)
  279. if self.is_cloned == True:
  280. sql = "SELECT * FROM __files__ WHERE (path = '" + path + "')"
  281. self.log(sql)
  282. result = self.conn.execute(sql).fetchall()
  283. if len(result) != 0:
  284. if result[0]['checksum'] == checksum:
  285. old_id = result[0]['id']
  286. sql = "UPDATE __files__ SET confirmed = 1 WHERE (id = " + str(old_id) +")"
  287. self.log(sql)
  288. self.conn.execute(sql)
  289. return old_id
  290. else:
  291. self.InternalCleanUpUtils().clean_up_file(self, result[0]['id'])
  292. sql = "INSERT OR REPLACE INTO __files__ (path, checksum, confirmed) VALUES (?, ?, 1)"
  293. column_data = [path, checksum]
  294. self.log(sql + " /with arguments: " + str(column_data))
  295. cur = self.conn.cursor()
  296. cur.execute(sql, column_data)
  297. self.InternalPathUtils().update_dirs(self, path=path)
  298. return cur.lastrowid
  299. def iterate_dircontent(self, path, include_subdirs = True, include_subfiles = True):
  300. self.InternalPathUtils().update_dirs(self)
  301. path = self.InternalPathUtils().normalize_path(path)
  302. cur_head = self.dirs
  303. valid = True
  304. if path != "":
  305. for head in self.InternalPathUtils().iterate_heads(path):
  306. if head not in cur_head.keys():
  307. # non existing directory
  308. valid = False
  309. else:
  310. cur_head = cur_head[head]
  311. basename = os.path.basename(path)
  312. if basename not in cur_head.keys() or cur_head[basename] == None:
  313. # do not exist or points to the file
  314. valid = False
  315. else:
  316. cur_head = cur_head[basename]
  317. if valid == True:
  318. for elem in cur_head.keys():
  319. if include_subdirs == True and cur_head[elem] != None:
  320. yield elem
  321. if include_subfiles == True and cur_head[elem] == None:
  322. yield elem
  323. def check_file(self, path):
  324. return self.get_file(path) != None
  325. def check_dir(self, path):
  326. for each in self.iterate_dircontent(path):
  327. each = each # used
  328. return True # there is at least one item
  329. return False
  330. def get_file(self, path):
  331. path = self.InternalPathUtils().normalize_path(path)
  332. result = self.select_rows("__files__", filters = [("path", "=", path), ("confirmed", "=", 1)])
  333. if len(result) == 0:
  334. return None
  335. assert(len(result) == 1)
  336. return self.FileData(result[0]['id'], result[0]['path'], result[0]['checksum'])
  337. def iterate_files(self):
  338. sql = "SELECT * FROM __files__ WHERE (confirmed = 1)"
  339. self.log(sql)
  340. for row in self.conn.execute(sql).fetchall():
  341. yield self.FileData(row['id'], row['path'], row['checksum'])
  342. def create_region(self, file_id, region_id, name, begin, end, line_begin, line_end, cursor, group, checksum):
  343. assert(self.read_only == False)
  344. sql = "INSERT OR REPLACE INTO __regions__ (file_id, region_id, name, begin, end, line_begin, line_end, cursor, group_id, checksum) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
  345. column_data = [file_id, region_id, name, begin, end, line_begin, line_end, cursor, group, checksum]
  346. self.log(sql + " /with arguments: " + str(column_data))
  347. cur = self.conn.cursor()
  348. cur.execute(sql, column_data)
  349. return cur.lastrowid
  350. def get_region(self, file_id, region_id):
  351. result = self.select_rows("__regions__", filters = [("file_id", "=", file_id), ("region_id", "=", region_id)])
  352. if len(result) == 0:
  353. return None
  354. return self.RegionData(result[0]['file_id'],
  355. result[0]['region_id'],
  356. result[0]['name'],
  357. result[0]['begin'],
  358. result[0]['end'],
  359. result[0]['line_begin'],
  360. result[0]['line_end'],
  361. result[0]['cursor'],
  362. result[0]['group_id'],
  363. result[0]['checksum'])
  364. def iterate_regions(self, file_id):
  365. for each in self.select_rows("__regions__", filters = [("file_id", "=", file_id)]):
  366. yield self.RegionData(each['file_id'],
  367. each['region_id'],
  368. each['name'],
  369. each['begin'],
  370. each['end'],
  371. each['line_begin'],
  372. each['line_end'],
  373. each['cursor'],
  374. each['group_id'],
  375. each['checksum'])
  376. def create_marker(self, file_id, begin, end, group):
  377. assert(self.read_only == False)
  378. sql = "INSERT OR REPLACE INTO __markers__ (file_id, begin, end, group_id) VALUES (?, ?, ?, ?)"
  379. column_data = [file_id, begin, end, group]
  380. self.log(sql + " /with arguments: " + str(column_data))
  381. cur = self.conn.cursor()
  382. cur.execute(sql, column_data)
  383. return cur.lastrowid
  384. def iterate_markers(self, file_id):
  385. for each in self.select_rows("__markers__", filters = [("file_id", "=", file_id)]):
  386. yield self.MarkerData(each['file_id'],
  387. each['name'],
  388. each['begin'],
  389. each['group_id'])
  390. def add_row(self, table_name, file_id, region_id, array_data):
  391. assert(self.read_only == False)
  392. column_names = "'file_id'"
  393. column_values = "?"
  394. column_data = [file_id]
  395. if region_id != None:
  396. column_names += ", 'region_id'"
  397. column_values += ", ?"
  398. column_data.append(region_id)
  399. useful_data = 0
  400. for each in array_data:
  401. column_names += ", '" + each[0] + "'"
  402. column_values += ", ?"
  403. column_data.append(each[1])
  404. useful_data += 1
  405. if useful_data == 0:
  406. return
  407. sql = "INSERT OR REPLACE INTO '" + table_name + "' (" + column_names + ") VALUES (" + column_values + ")"
  408. self.log(sql + " /with arguments: " + str(column_data))
  409. cur = self.conn.cursor()
  410. cur.execute(sql, column_data)
  411. return cur.lastrowid
  412. def select_rows(self, table_name, path_like = None, column_names = [], filters = []):
  413. safe_column_names = []
  414. for each in column_names:
  415. safe_column_names.append("'" + each + "'")
  416. return self.select_rows_unsafe(table_name, path_like = path_like, column_names = safe_column_names, filters = filters)
    def select_rows_unsafe(self, table_name, path_like = None, column_names = [], filters = []):
        """Build and execute a SELECT against *table_name*.

        path_like, when given, joins against __files__ and restricts results
        to paths matching the LIKE pattern.  filters is a list of
        (column, operator, value) triples ANDed together; values are bound as
        parameters except for the 'IN' operator, whose right-hand side is
        inlined verbatim.  Returns the fetched rows ([] when not connected).

        NOTE(review): table/column names and 'IN' payloads are concatenated
        into the SQL text, so callers must pass trusted identifiers (see
        select_rows for the quoting wrapper).  The mutable default arguments
        are never mutated here.  Also: when both filters and path_like are
        empty, the final else still emits a path LIKE clause binding None —
        looks unintended; confirm callers always pass at least one of them.
        """
        path_like = self.InternalPathUtils().normalize_path(path_like)
        if self.conn == None:
            return []
        table_stmt = "'" + table_name + "'"
        what_stmt = ", ".join(column_names)
        if len(what_stmt) == 0:
            # no explicit columns requested: select everything
            what_stmt = "*"
        elif path_like != None:
            # explicit columns plus the joined file path/id
            what_stmt += ", '__files__'.'path', '__files__'.'id'"
        inner_stmt = ""
        if path_like != None:
            inner_stmt = " INNER JOIN '__files__' ON '__files__'.'id' = '" + table_name + "'.'file_id' "
        where_stmt = " "
        values = ()
        if len(filters) != 0:
            # first filter opens the WHERE clause ...
            if filters[0][1] == 'IN':
                where_stmt = " WHERE (`" + filters[0][0] + "` " + filters[0][1] + " " + filters[0][2]
            else:
                where_stmt = " WHERE (`" + filters[0][0] + "` " + filters[0][1] + " ?"
                values = (filters[0][2],)
            # ... and the remaining ones are ANDed on
            for each in filters[1:]:
                if each[1] == 'IN':
                    where_stmt += " AND `" + each[0] + "` " + each[1] + " " + each[2]
                else:
                    where_stmt += " AND `" + each[0] + "` " + each[1] + " ?"
                    values += (each[2], )
            if path_like != None:
                where_stmt += " AND '__files__'.'path' LIKE ?"
                values += (path_like, )
            where_stmt += ")"
        else:
            where_stmt = " WHERE '__files__'.'path' LIKE ?"
            values += (path_like, )
        sql = "SELECT " + what_stmt + " FROM " + table_stmt + inner_stmt + where_stmt
        self.log(sql + " /with arguments: " + str(values))
        return self.conn.execute(sql, values).fetchall()
  454. def get_row(self, table_name, file_id, region_id):
  455. selected = self.get_rows(table_name, file_id, region_id)
  456. # assures that only one row in database
  457. # if assertion happens, caller's intention is not right, use get_rows instead
  458. assert(len(selected) == 0 or len(selected) == 1)
  459. if len(selected) == 0:
  460. return None
  461. return selected[0]
  462. def get_rows(self, table_name, file_id, region_id):
  463. filters = [("file_id", '=', file_id)]
  464. if region_id != None:
  465. filters.append(("region_id", '=', region_id))
  466. return self.select_rows(table_name, filters=filters)
  467. def aggregate_rows(self, table_name, path_like = None, column_names = None, filters = []):
  468. if column_names == None:
  469. column_names = []
  470. for column in self.iterate_columns(table_name):
  471. column_names.append(column.name)
  472. if len(column_names) == 0:
  473. # it is possible that a table does not have meanfull columns
  474. return {}
  475. total_column_names = []
  476. for column_name in column_names:
  477. for func in ['max', 'min', 'avg', 'total']:
  478. total_column_names.append(func + "('" + table_name + "'.'" + column_name + "') AS " + "'" + column_name + "_" + func + "'")
  479. data = self.select_rows_unsafe(table_name, path_like = path_like, column_names = total_column_names, filters = filters)
  480. assert(len(data) == 1)
  481. result = {}
  482. for column_name in column_names:
  483. result[column_name] = {}
  484. for func in ['max', 'min', 'avg', 'total']:
  485. result[column_name][func] = data[0][column_name + "_" + func]
  486. return result
  487. def log(self, sql):
  488. #import traceback
  489. #traceback.print_stack()
  490. logging.debug("[" + str(self.id) + "] Executing query: " + sql)