12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203 |
- #
- # Metrix++, Copyright 2009-2013, Metrix++ Project
- # Link: http://metrixplusplus.sourceforge.net
- #
- # This file is a part of Metrix++ Tool.
- #
- # Metrix++ is free software: you can redistribute it and/or modify
- # it under the terms of the GNU General Public License as published by
- # the Free Software Foundation, version 3 of the License.
- #
- # Metrix++ is distributed in the hope that it will be useful,
- # but WITHOUT ANY WARRANTY; without even the implied warranty of
- # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- # GNU General Public License for more details.
- #
- # You should have received a copy of the GNU General Public License
- # along with Metrix++. If not, see <http://www.gnu.org/licenses/>.
- #
- import os.path
- import sys
- import mpp.internal.dbwrap
- import mpp.internal.api_impl
- ##############################################################################
- #
- #
- #
- ##############################################################################
class Data(object):
    """In-memory metrics store organised as {namespace: {field: value}}."""

    def __init__(self):
        self.data = {}

    def get_data(self, namespace, field):
        """Return the stored value, or None if the namespace/field is absent."""
        # single dict lookup instead of the original .keys() membership scans
        if namespace not in self.data:
            return None
        return self.data[namespace].get(field)

    def set_data(self, namespace, field, value):
        """Store value, creating the namespace bucket on first use."""
        if namespace not in self.data:
            self.data[namespace] = {}
        self.data[namespace][field] = value

    def iterate_namespaces(self):
        """Yield every namespace name currently present."""
        for namespace in self.data.keys():
            yield namespace

    def iterate_fields(self, namespace):
        """Yield (field, value) pairs of the given namespace."""
        for field in self.data[namespace].keys():
            yield (field, self.data[namespace][field])

    def get_data_tree(self, namespaces=None):
        # 'namespaces' is unused here but kept for subclass interface compatibility
        return self.data

    def __repr__(self):
        return object.__repr__(self) + " with data " + self.data.__repr__()
class LoadableData(Data):
    """Data whose namespaces are lazily loaded from the loader's database.

    Tracks which namespaces were loaded and which were modified so that
    only changed rows need to be written back.
    """

    def __init__(self, loader, file_id, region_id):
        Data.__init__(self)
        self.loader = loader
        self.file_id = file_id
        self.region_id = region_id  # None for file-level data
        self.loaded_namespaces = []
        self.changed_namespaces = []

    def load_namespace(self, namespace):
        """Populate self.data for one namespace from its database row."""
        namespace_obj = self.loader.get_namespace(namespace)
        if namespace_obj is None:
            return
        regions_supported = namespace_obj.are_regions_supported()
        # skip namespaces whose region support does not match this object
        if ((self.region_id is None and regions_supported) or
                (self.region_id is not None and not regions_supported)):
            return
        row = self.loader.db.get_row(namespace, self.file_id, self.region_id)
        if row is None:
            return
        for column_name in row.keys():
            try:
                packager = namespace_obj._get_field_packager(column_name)
            except mpp.internal.api_impl.PackagerError:
                # column without a registered packager - ignore it
                continue
            if row[column_name] is None:
                continue
            Data.set_data(self, namespace, column_name,
                          packager.unpack(row[column_name]))

    def set_data(self, namespace, field, value):
        """Store value and mark the namespace as changed."""
        if namespace not in self.changed_namespaces:
            self.changed_namespaces.append(namespace)
        return Data.set_data(self, namespace, field, value)

    def get_data(self, namespace, field):
        """Return the value, loading the namespace lazily on first read."""
        if namespace not in self.loaded_namespaces:
            self.loaded_namespaces.append(namespace)
            self.load_namespace(namespace)
        return Data.get_data(self, namespace, field)

    def is_namespace_updated(self, namespace):
        return namespace in self.changed_namespaces

    def is_namespace_loaded(self, namespace):
        return namespace in self.loaded_namespaces

    def get_data_tree(self, namespaces=None):
        """Load the given namespaces (all known ones by default) and return the tree."""
        if namespaces is None:
            namespaces = self.loader.iterate_namespace_names()
        for each in namespaces:
            self.load_namespace(each)
        return Data.get_data_tree(self)
-
class Region(LoadableData):
    """A single parsed code region (function, class, ...) within a file."""

    class T(object):
        """Bit flags identifying the kind of a region."""
        NONE = 0x00
        GLOBAL = 0x01
        CLASS = 0x02
        STRUCT = 0x04
        NAMESPACE = 0x08
        FUNCTION = 0x10
        INTERFACE = 0x20
        ANY = 0xFF

        def to_str(self, group):
            """Translate a single type flag into its lowercase name."""
            labels = {
                self.NONE: "none",
                self.GLOBAL: "global",
                self.CLASS: "class",
                self.STRUCT: "struct",
                self.NAMESPACE: "namespace",
                self.FUNCTION: "function",
                self.INTERFACE: "interface",
            }
            label = labels.get(group)
            assert label is not None
            return label

    def __init__(self, loader, file_id, region_id, region_name,
                 offset_begin, offset_end, line_begin, line_end,
                 cursor_line, group, checksum):
        LoadableData.__init__(self, loader, file_id, region_id)
        self.name = region_name
        self.begin = offset_begin
        self.end = offset_end
        self.line_begin = line_begin
        self.line_end = line_end
        self.cursor = cursor_line
        self.group = group
        self.checksum = checksum
        # ids of directly nested regions, filled by the parent file data
        self.children = []

    def get_id(self):
        return self.region_id

    def get_name(self):
        return self.name

    def get_offset_begin(self):
        return self.begin

    def get_offset_end(self):
        return self.end

    def get_line_begin(self):
        return self.line_begin

    def get_line_end(self):
        return self.line_end

    def get_cursor(self):
        return self.cursor

    def get_type(self):
        return self.group

    def get_checksum(self):
        return self.checksum

    def iterate_subregion_ids(self):
        """Return the ids of directly nested subregions."""
        return self.children

    def _register_subregion_id(self, child_id):
        self.children.append(child_id)
class Marker(object):
    """A typed span [begin, end) of file content (comment, string, ...)."""

    class T(object):
        """Bit flags identifying the kind of a marker."""
        NONE = 0x00
        COMMENT = 0x01
        STRING = 0x02
        PREPROCESSOR = 0x04
        CODE = 0x08
        ANY = 0xFF

        def to_str(self, group):
            """Translate a single type flag into its lowercase name."""
            labels = {
                self.NONE: "none",
                self.COMMENT: "comment",
                self.STRING: "string",
                self.PREPROCESSOR: "preprocessor",
                self.CODE: "code",
            }
            label = labels.get(group)
            assert label is not None
            return label

    def __init__(self, offset_begin, offset_end, group):
        self.begin = offset_begin
        self.end = offset_end
        self.group = group

    def get_offset_begin(self):
        return self.begin

    def get_offset_end(self):
        return self.end

    def get_type(self):
        return self.group
class FileData(LoadableData):
    """Per-file data: content plus the lazily loaded region tree and markers."""

    def __init__(self, loader, path, file_id, checksum, content):
        LoadableData.__init__(self, loader, file_id, None)
        self.path = path
        self.checksum = checksum
        self.content = content
        self.regions = None   # None means "not loaded yet"
        self.markers = None   # None means "not loaded yet"
        self.loader = loader
        self.loading_tmp = []  # stack used to rebuild the region nesting tree

    def get_id(self):
        return self.file_id

    def get_path(self):
        return self.path

    def get_checksum(self):
        return self.checksum

    def get_content(self):
        return self.content

    def _internal_append_region(self, region):
        # here we apply some magic - we rely on special ordering of coming regions,
        # which is supported by code parsers
        prev_id = None
        while True:
            if len(self.loading_tmp) == 0:
                break
            prev_id = self.loading_tmp.pop()
            if self.get_region(prev_id).get_offset_end() > region.get_offset_begin():
                self.loading_tmp.append(prev_id)  # return back
                break
        self.loading_tmp.append(region.get_id())
        if prev_id is not None:
            self.get_region(prev_id)._register_subregion_id(region.get_id())
        self.regions.append(region)

    def load_regions(self):
        """Load all regions of this file from the database (idempotent)."""
        if self.regions is None:
            self.regions = []
            for each in self.loader.db.iterate_regions(self.get_id()):
                self._internal_append_region(Region(self.loader,
                                                    self.get_id(),
                                                    each.region_id,
                                                    each.name,
                                                    each.begin,
                                                    each.end,
                                                    each.line_begin,
                                                    each.line_end,
                                                    each.cursor,
                                                    each.group,
                                                    each.checksum))
                # region ids are expected to be sequential starting from 1
                assert len(self.regions) == each.region_id

    def add_region(self, region_name, offset_begin, offset_end,
                   line_begin, line_end, cursor_line, group, checksum):
        """Create a region during collection; returns the new region id."""
        if self.regions is None:
            # do not load regions and markers in time of collection
            # if region is added first by parser, set markers to empty list as well
            # because if there are no markers in a file, it forces loading of markers
            # during iterate_markers call
            self.regions = []
            self.markers = []
        new_id = len(self.regions) + 1
        self._internal_append_region(Region(self.loader, self.get_id(), new_id,
                                            region_name, offset_begin, offset_end,
                                            line_begin, line_end, cursor_line,
                                            group, checksum))
        self.loader.db.create_region(self.file_id, new_id, region_name,
                                     offset_begin, offset_end, line_begin,
                                     line_end, cursor_line, group, checksum)
        return new_id

    def get_region(self, region_id):
        self.load_regions()
        return self.regions[region_id - 1]

    def iterate_regions(self, filter_group=Region.T.ANY):
        """Yield regions whose type matches the filter bitmask."""
        self.load_regions()
        for each in self.regions:
            if each.group & filter_group:
                yield each

    def are_regions_loaded(self):
        return self.regions is not None

    def load_markers(self):
        if self.markers is None:
            # TODO add assert in case of an attempt to load data during collection
            assert False  # TODO not used in post-processing tools for while, need to be fixed
            self.markers = []
            for each in self.loader.db.iterate_markers(self.get_id()):
                self.markers.append(Marker(each.begin, each.end, each.group))

    def add_marker(self, offset_begin, offset_end, group):
        """Record a marker during collection (not persisted, see TODO)."""
        if self.markers is None:
            # do not load regions and markers in time of collection
            # if marker is added first by parser, set regions to empty list as well
            # because if there are no regions in a file, it forces loading of regions
            # during iterate_regions call
            self.regions = []
            self.markers = []
        self.markers.append(Marker(offset_begin, offset_end, group))
        # TODO drop collecting markers, it is faster to double parse
        # it is not the same with regions, it is faster to load regions
        # on iterative re-run
        #self.loader.db.create_marker(self.file_id, offset_begin, offset_end, group)

    def iterate_markers(self, filter_group=Marker.T.ANY,
                        region_id=None, exclude_children=True, merge=False):
        """Yield markers, optionally scoped to a region, merged, or with
        synthetic CODE markers filling the gaps between stored markers."""
        self.load_markers()

        # merged markers
        if merge:
            next_marker = None
            for marker in self.iterate_markers(filter_group, region_id,
                                               exclude_children, merge=False):
                if next_marker is not None:
                    if next_marker.get_offset_end() == marker.get_offset_begin():
                        # sequential markers: coalesce and OR the type bits
                        next_marker = Marker(next_marker.get_offset_begin(),
                                             marker.get_offset_end(),
                                             next_marker.get_type() | marker.get_type())
                    else:
                        yield next_marker
                        next_marker = None
                if next_marker is None:
                    next_marker = Marker(marker.get_offset_begin(),
                                         marker.get_offset_end(),
                                         marker.get_type())
            if next_marker is not None:
                yield next_marker

        # all markers per file
        elif region_id is None:
            next_code_marker_start = 0
            for marker in self.markers:
                if Marker.T.CODE & filter_group and next_code_marker_start < marker.get_offset_begin():
                    yield Marker(next_code_marker_start, marker.get_offset_begin(), Marker.T.CODE)
                if marker.group & filter_group:
                    yield marker
                next_code_marker_start = marker.get_offset_end()
            if Marker.T.CODE & filter_group and next_code_marker_start < len(self.get_content()):
                yield Marker(next_code_marker_start, len(self.get_content()), Marker.T.CODE)

        # markers per region
        else:
            region = self.get_region(region_id)
            if region is not None:

                # code parsers and database know about non-code markers
                # clients want to iterate code as markers as well
                # so, we embed code markers in run-time
                class CodeMarker(Marker):
                    pass

                # cache markers for all regions if it does not exist
                if not hasattr(region, '_markers_list'):
                    # subroutine to populate _markers_list attribute
                    # _markers_list does include code markers
                    def cache_markers_list_rec(data, region_id, marker_start_ind, next_code_marker_start):
                        region = data.get_region(region_id)
                        region._markers_list = []
                        region._first_marker_ind = marker_start_ind
                        #next_code_marker_start = region.get_offset_begin()

                        for sub_id in region.iterate_subregion_ids():
                            subregion = data.get_region(sub_id)
                            # cache all markers before the subregion
                            while len(data.markers) > marker_start_ind and \
                                    subregion.get_offset_begin() > data.markers[marker_start_ind].get_offset_begin():
                                if next_code_marker_start < data.markers[marker_start_ind].get_offset_begin():
                                    # append code markers coming before non-code marker
                                    region._markers_list.append(CodeMarker(next_code_marker_start,
                                                                           data.markers[marker_start_ind].get_offset_begin(),
                                                                           Marker.T.CODE))
                                next_code_marker_start = data.markers[marker_start_ind].get_offset_end()
                                region._markers_list.append(marker_start_ind)
                                marker_start_ind += 1

                            # cache all code markers before the subregion but after the last marker
                            if next_code_marker_start < subregion.get_offset_begin():
                                region._markers_list.append(CodeMarker(next_code_marker_start,
                                                                       subregion.get_offset_begin(),
                                                                       Marker.T.CODE))
                            next_code_marker_start = subregion.get_offset_begin()

                            # here is the recursive call for all sub-regions
                            (marker_start_ind, next_code_marker_start) = cache_markers_list_rec(data,
                                                                                                sub_id,
                                                                                                marker_start_ind,
                                                                                                next_code_marker_start)

                        # cache all markers after the last subregion
                        while len(data.markers) > marker_start_ind and \
                                region.get_offset_end() > data.markers[marker_start_ind].get_offset_begin():
                            # append code markers coming before non-code marker
                            if next_code_marker_start < data.markers[marker_start_ind].get_offset_begin():
                                region._markers_list.append(CodeMarker(next_code_marker_start,
                                                                       data.markers[marker_start_ind].get_offset_begin(),
                                                                       Marker.T.CODE))
                            next_code_marker_start = data.markers[marker_start_ind].get_offset_end()
                            region._markers_list.append(marker_start_ind)
                            marker_start_ind += 1

                        # cache the last code segment after the last marker
                        if next_code_marker_start < region.get_offset_end():
                            region._markers_list.append(CodeMarker(next_code_marker_start,
                                                                   region.get_offset_end(),
                                                                   Marker.T.CODE))
                        next_code_marker_start = region.get_offset_end()

                        # return the starting point for the next call of this function
                        return (marker_start_ind, next_code_marker_start)

                    # append markers list to all regions recursively
                    (next_marker_pos, next_code_marker_start) = cache_markers_list_rec(self, 1, 0, 0)
                    assert next_marker_pos == len(self.markers)

                # excluding subregions
                if exclude_children:
                    for marker_ind in region._markers_list:
                        if isinstance(marker_ind, int):
                            marker = self.markers[marker_ind]
                        else:
                            marker = marker_ind  # CodeMarker
                        if marker.group & filter_group:
                            yield marker

                # including subregions
                else:
                    next_code_marker_start = region.get_offset_begin()
                    for marker in self.markers[region._first_marker_ind:]:
                        if marker.get_offset_begin() >= region.get_offset_end():
                            break
                        if region.get_offset_begin() > marker.get_offset_begin():
                            continue
                        if Marker.T.CODE & filter_group and next_code_marker_start < marker.get_offset_begin():
                            yield Marker(next_code_marker_start, marker.get_offset_begin(), Marker.T.CODE)
                        if marker.group & filter_group:
                            yield marker
                        next_code_marker_start = marker.get_offset_end()
                    if Marker.T.CODE & filter_group and next_code_marker_start < region.get_offset_end():
                        yield Marker(next_code_marker_start, region.get_offset_end(), Marker.T.CODE)

    def are_markers_loaded(self):
        return self.markers is not None

    def __repr__(self):
        return Data.__repr__(self) + " and regions " + self.regions.__repr__()
class AggregatedData(Data):
    """Aggregated metrics for a path plus lazily cached lists of its content."""

    def __init__(self, loader, path):
        Data.__init__(self)
        self.path = path
        self.loader = loader
        self.subdirs = None   # None means "not fetched yet"
        self.subfiles = None  # None means "not fetched yet"

    def get_subdirs(self):
        """Return (and cache) direct subdirectory names of self.path."""
        if self.subdirs is not None:
            return self.subdirs
        self.subdirs = []
        if self.path is not None:
            for subdir in self.loader.db.iterate_dircontent(self.path,
                                                            include_subdirs=True,
                                                            include_subfiles=False):
                self.subdirs.append(subdir)
        return self.subdirs

    def get_subfiles(self):
        """Return (and cache) direct file names of self.path."""
        if self.subfiles is not None:
            return self.subfiles
        self.subfiles = []
        if self.path is not None:
            for subfile in self.loader.db.iterate_dircontent(self.path,
                                                             include_subdirs=False,
                                                             include_subfiles=True):
                self.subfiles.append(subfile)
        return self.subfiles
class SelectData(Data):
    """One selected database row: path, optional region, and field values."""

    def __init__(self, loader, path, file_id, region_id):
        Data.__init__(self)
        self.loader = loader
        self.path = path
        self.file_id = file_id
        self.region_id = region_id  # None for file-level rows
        self.region = None

    def get_path(self):
        return self.path

    def get_region(self):
        """Fetch and wrap the Region lazily on first request (or None)."""
        if self.region is None and self.region_id is not None:
            row = self.loader.db.get_region(self.file_id, self.region_id)
            if row is not None:
                self.region = Region(self.loader,
                                     self.file_id,
                                     self.region_id,
                                     row.name,
                                     row.begin,
                                     row.end,
                                     row.line_begin,
                                     row.line_end,
                                     row.cursor,
                                     row.group,
                                     row.checksum)
        return self.region
class DiffData(Data):
    """Field-wise numeric difference between a new and an old Data snapshot."""

    def __init__(self, new_data, old_data):
        Data.__init__(self)
        self.new_data = new_data
        self.old_data = old_data

    def get_data(self, namespace, field):
        """Return new - old for the field, or None if the new value is absent."""
        new_data = self.new_data.get_data(namespace, field)
        old_data = self.old_data.get_data(namespace, field)
        if new_data is None:
            return None
        if old_data is None:
            # non_zero fields has got zero value by default if missed
            # the data can be also unavailable,
            # because previous collection does not include that
            # but external tools (like limit.py) should warn about this,
            # using list of registered database properties
            old_data = 0
        return new_data - old_data
-
- ####################################
- # Loader
- ####################################
class Namespace(object):
    """Schema wrapper for one database table (a metrics namespace)."""

    class NamespaceError(Exception):
        def __init__(self, namespace, reason):
            Exception.__init__(self, "Namespace '"
                               + namespace
                               + "': '"
                               + reason
                               + "'")

    class FieldError(Exception):
        def __init__(self, field, reason):
            Exception.__init__(self, "Field '"
                               + field
                               + "': '"
                               + reason
                               + "'")

    def __init__(self, db_handle, name, support_regions=False, version='1.0'):
        """Create (or attach to) the backing table and register its columns.

        Raises NamespaceError if name is not a string.
        """
        if not isinstance(name, str):
            raise Namespace.NamespaceError(name, "name not a string")
        self.name = name
        self.support_regions = support_regions
        self.fields = {}
        self.db = db_handle

        if not self.db.check_table(name):
            self.db.create_table(name, support_regions, version)
        else:
            # table exists (e.g. cloned db) - mirror its columns as fields
            for column in self.db.iterate_columns(name):
                self.add_field(column.name,
                               mpp.internal.api_impl.PackagerFactory().get_python_type(column.sql_type),
                               non_zero=column.non_zero)

    def get_name(self):
        return self.name

    def are_regions_supported(self):
        return self.support_regions

    def add_field(self, field_name, python_type, non_zero=False):
        """Register a field; returns db.create_column result or None if the
        column already existed. Raises FieldError on bad/duplicate names."""
        if not isinstance(field_name, str):
            raise Namespace.FieldError(field_name, "field_name not a string")
        packager = mpp.internal.api_impl.PackagerFactory().create(python_type, non_zero)
        if field_name in self.fields:
            raise Namespace.FieldError(field_name, "double used")
        self.fields[field_name] = packager

        if not self.db.check_column(self.get_name(), field_name):
            # - False if cloned
            # - True if created
            return self.db.create_column(self.name, field_name,
                                         packager.get_sql_type(), non_zero=non_zero)
        return None  # if double request

    def iterate_field_names(self):
        for name in self.fields.keys():
            yield name

    def check_field(self, field_name):
        """Return True if the field is registered."""
        try:
            self._get_field_packager(field_name)
        except mpp.internal.api_impl.PackagerError:
            return False
        return True

    def get_field_sql_type(self, field_name):
        try:
            return self._get_field_packager(field_name).get_sql_type()
        except mpp.internal.api_impl.PackagerError:
            raise Namespace.FieldError(field_name, 'does not exist')

    def get_field_python_type(self, field_name):
        try:
            return self._get_field_packager(field_name).get_python_type()
        except mpp.internal.api_impl.PackagerError:
            raise Namespace.FieldError(field_name, 'does not exist')

    def is_field_non_zero(self, field_name):
        try:
            return self._get_field_packager(field_name).is_non_zero()
        except mpp.internal.api_impl.PackagerError:
            raise Namespace.FieldError(field_name, 'does not exist')

    def _get_field_packager(self, field_name):
        if field_name in self.fields:
            return self.fields[field_name]
        raise mpp.internal.api_impl.PackagerError("unknown field " + field_name + " requested")
-
class Loader(object):
    """Facade over the metrics database: namespaces, file data, queries."""

    def __init__(self):
        self.namespaces = {}
        self.db = None
        self.last_file_data = None  # for performance boost reasons

    def create_database(self, dbfile, previous_db=None):
        """Create a new database (optionally cloned); returns success flag."""
        self.db = mpp.internal.dbwrap.Database()
        try:
            self.db.create(dbfile, clone_from=previous_db)
        except Exception:
            # was a bare 'except:'; narrowed so SystemExit/KeyboardInterrupt propagate
            return False
        return True

    def open_database(self, dbfile, read_only=True):
        """Open an existing database and mirror its tables as namespaces."""
        self.db = mpp.internal.dbwrap.Database()
        if not os.path.exists(dbfile):
            return False
        try:
            self.db.connect(dbfile, read_only=read_only)
        except Exception:
            # was a bare 'except:'; narrowed so SystemExit/KeyboardInterrupt propagate
            return False

        for table in self.db.iterate_tables():
            self.create_namespace(table.name, table.support_regions)
        return True

    def set_property(self, property_name, value):
        """Store a property (stringified); returns the previous value."""
        if self.db is None:
            return None
        return self.db.set_property(property_name, str(value))

    def get_property(self, property_name):
        if self.db is None:
            return None
        return self.db.get_property(property_name)

    def iterate_properties(self):
        if self.db is None:
            return None
        return self.db.iterate_properties()

    def create_namespace(self, name, support_regions=False, version='1.0'):
        """Register a new namespace; raises NamespaceError on duplicates."""
        if self.db is None:
            return None
        if name in self.namespaces:
            raise Namespace.NamespaceError(name, "double used")
        new_namespace = Namespace(self.db, name, support_regions, version)
        self.namespaces[name] = new_namespace
        return new_namespace

    def iterate_namespace_names(self):
        for name in self.namespaces.keys():
            yield name

    def get_namespace(self, name):
        """Return the Namespace object or None if unknown."""
        return self.namespaces.get(name)

    def create_file_data(self, path, checksum, content):
        """Create a file record; returns (FileData, is_updated) or None."""
        if self.db is None:
            return None
        (new_id, is_updated) = self.db.create_file(path, checksum)
        result = FileData(self, path, new_id, checksum, content)
        self.last_file_data = result
        return (result, is_updated)

    def load_file_data(self, path):
        """Load FileData for a path (one-entry cache); None if unknown."""
        if self.db is None:
            return None
        if self.last_file_data is not None and self.last_file_data.get_path() == path:
            return self.last_file_data

        data = self.db.get_file(path)
        if data is None:
            return None

        result = FileData(self, data.path, data.id, data.checksum, None)
        self.last_file_data = result
        return result

    class DataNotPackable(Exception):
        def __init__(self, namespace, field, value, packager, extra_message):
            Exception.__init__(self, "Data '"
                               + str(value)
                               + "' of type "
                               + str(value.__class__)
                               + " referred by '"
                               + namespace
                               + "=>"
                               + field
                               + "' is not packable by registered packager '"
                               + str(packager.__class__)
                               + "': " + extra_message)

    def save_file_data(self, file_data):
        """Persist changed namespaces of a file (and its regions) to the db."""
        if self.db is None:
            return None

        class DataIterator(object):
            # adapts (field, packed_value) pairs of one namespace for db.add_row

            def iterate_packed_values(self, data, namespace, support_regions=False):
                for each in data.iterate_fields(namespace):
                    space = self.loader.get_namespace(namespace)
                    if space is None:
                        raise Loader.DataNotPackable(namespace, each[0], each[1], None, "The namespace has not been found")

                    try:
                        packager = space._get_field_packager(each[0])
                    except mpp.internal.api_impl.PackagerError:
                        raise Loader.DataNotPackable(namespace, each[0], each[1], None, "The field has not been found")

                    if space.support_regions != support_regions:
                        raise Loader.DataNotPackable(namespace, each[0], each[1], packager, "Incompatible support for regions")

                    try:
                        packed_data = packager.pack(each[1])
                        if packed_data is None:
                            continue
                    except mpp.internal.api_impl.PackagerError:
                        raise Loader.DataNotPackable(namespace, each[0], each[1], packager, "Packager raised exception")

                    yield (each[0], packed_data)

            def __init__(self, loader, data, namespace, support_regions=False):
                self.loader = loader
                self.iterator = self.iterate_packed_values(data, namespace, support_regions)

            def __iter__(self):
                return self.iterator

        # TODO can construct to add multiple rows at one sql query
        # to improve the performance
        for namespace in file_data.iterate_namespaces():
            if not file_data.is_namespace_updated(namespace):
                continue
            self.db.add_row(namespace,
                            file_data.get_id(),
                            None,
                            DataIterator(self, file_data, namespace))

        if file_data.are_regions_loaded():
            for region in file_data.iterate_regions():
                for namespace in region.iterate_namespaces():
                    if not region.is_namespace_updated(namespace):
                        continue
                    self.db.add_row(namespace,
                                    file_data.get_id(),
                                    region.get_id(),
                                    DataIterator(self, region, namespace, support_regions=True))

    def iterate_file_data(self, path=None, path_like_filter="%"):
        """Return an iterable of FileData matching the path filter, or None."""
        if self.db is None:
            return None

        final_path_like = path_like_filter
        if path is not None:
            if not self.db.check_dir(path) and not self.db.check_file(path):
                return None
            final_path_like = path + path_like_filter

        class FileDataIterator(object):
            def iterate_file_data(self, loader, final_path_like):
                for data in loader.db.iterate_files(path_like=final_path_like):
                    yield FileData(loader, data.path, data.id, data.checksum, None)

            def __init__(self, loader, final_path_like):
                self.iterator = self.iterate_file_data(loader, final_path_like)

            def __iter__(self):
                return self.iterator

        # a redundant second 'db is None' check was removed here (dead code:
        # self.db was already verified non-None at the top of this method)
        return FileDataIterator(self, final_path_like)

    def load_aggregated_data(self, path=None, path_like_filter="%", namespaces=None):
        """Aggregate metrics for the given path; returns AggregatedData or None."""
        if self.db is None:
            return None

        final_path_like = path_like_filter
        if path is not None:
            if not self.db.check_dir(path) and not self.db.check_file(path):
                return None
            final_path_like = path + path_like_filter

        if namespaces is None:
            namespaces = self.namespaces.keys()

        result = AggregatedData(self, path)
        for name in namespaces:
            namespace = self.get_namespace(name)
            data = self.db.aggregate_rows(name, path_like=final_path_like)
            for field in data.keys():
                if namespace.get_field_python_type(field) == str:
                    # string fields cannot be aggregated numerically
                    continue
                data[field]['nonzero'] = namespace.is_field_non_zero(field)
                distribution = self.db.count_rows(name, path_like=final_path_like, group_by_column=field)
                data[field]['distribution-bars'] = []
                for each in distribution:
                    if each[0] is None:
                        continue
                    # fixed mis-parenthesised assert: was float(count != 0),
                    # i.e. float() applied to the boolean; assert the count itself
                    assert float(data[field]['count']) != 0
                    data[field]['distribution-bars'].append({'metric': each[0],
                                                             'count': each[1],
                                                             'ratio': (float(each[1]) / float(data[field]['count']))})
                result.set_data(name, field, data[field])
        return result

    def load_selected_data(self, namespace, fields=None, path=None, path_like_filter="%", filters=[],
                           sort_by=None, limit_by=None):
        """Return an iterable of SelectData rows for the namespace, or None.

        NOTE(review): 'filters' has a mutable default but is never mutated
        here, so it is currently harmless.
        """
        if self.db is None:
            return None

        final_path_like = path_like_filter
        if path is not None:
            if not self.db.check_dir(path) and not self.db.check_file(path):
                return None
            final_path_like = path + path_like_filter

        namespace_obj = self.get_namespace(namespace)
        if namespace_obj is None:
            return None

        class SelectDataIterator(object):

            def iterate_selected_values(self, loader, namespace_obj, final_path_like, fields, filters, sort_by, limit_by):
                for row in loader.db.select_rows(namespace_obj.get_name(), path_like=final_path_like, filters=filters,
                                                 order_by=sort_by, limit_by=limit_by):
                    region_id = None
                    if namespace_obj.are_regions_supported():
                        region_id = row['region_id']
                    data = SelectData(loader, row['path'], row['id'], region_id)
                    field_names = fields
                    if fields is None:
                        field_names = namespace_obj.iterate_field_names()
                    for field in field_names:
                        data.set_data(namespace, field, row[field])
                    yield data

            def __init__(self, loader, namespace_obj, final_path_like, fields, filters, sort_by, limit_by):
                self.iterator = self.iterate_selected_values(loader, namespace_obj, final_path_like, fields, filters, sort_by, limit_by)

            def __iter__(self):
                return self.iterator

        return SelectDataIterator(self, namespace_obj, final_path_like, fields, filters, sort_by, limit_by)
class BasePlugin(object):
    """Minimal plug-in contract: lifecycle hooks plus name/version/loader access."""

    def initialize(self):
        """Hook called once before the plug-in is used."""
        pass

    def terminate(self):
        """Hook called once when the plug-in is shut down."""
        pass

    def set_name(self, name):
        self.name = name

    def get_name(self):
        # attribute may be unset if set_name was never called
        return getattr(self, 'name', None)

    def set_version(self, version):
        self.version = version

    def get_version(self):
        return getattr(self, 'version', None)

    def _set_plugin_loader(self, loader):
        self.plugin_loader = loader

    def _get_plugin_loader(self):
        return getattr(self, 'plugin_loader', None)

    def get_plugin(self, plugin_name):
        """Look up a sibling plug-in through the loader."""
        return self._get_plugin_loader().get_plugin(plugin_name)

    def get_action(self):
        """Return the current action from the loader."""
        return self._get_plugin_loader().get_action()
class Plugin(BasePlugin):
    """Plugin with optional persistent storage.

    On initialize() it registers a namespace, its fields and properties with
    the database loader and tracks whether any of them changed since the
    previous run (self.is_updated), which triggers a full rescan downstream.
    """

    class Field(object):
        # Descriptor of one data field stored in the plugin's namespace.
        # non_zero=True means zero values are not written to the database.
        def __init__(self, name, ftype, non_zero=False):
            self.name = name
            self.type = ftype
            self.non_zero = non_zero

    class Property(object):
        # Named property persisted alongside the plugin's data.
        def __init__(self, name, value):
            self.name = name
            self.value = value

    def initialize(self, namespace=None, support_regions=True, fields=None, properties=None):
        """Register this plugin's namespace, fields and properties.

        namespace       -- storage namespace; defaults to the plugin's name
        support_regions -- whether per-region data is stored in the namespace
        fields          -- iterable of Plugin.Field (None means none)
        properties      -- iterable of Plugin.Property (None means none)

        Sets self.is_updated to True when the stored version, any property
        or any field differs from the previous database.
        """
        super(Plugin, self).initialize()

        # None instead of mutable default arguments ([] shared across calls)
        fields = [] if fields is None else fields
        properties = [] if properties is None else properties

        if not hasattr(self, 'is_updated'):
            self.is_updated = False # original initialization

        db_loader = self.get_plugin('mpp.dbf').get_loader()

        if namespace is None:
            namespace = self.get_name()

        if len(fields) != 0 or len(properties) != 0:
            # version bump invalidates previously collected data
            prev_version = db_loader.set_property(self.get_name() + ":version", self.get_version())
            if str(prev_version) != str(self.get_version()):
                self.is_updated = True

        for prop in properties:
            assert prop.name != 'version'
            prev_prop = db_loader.set_property(self.get_name() + ":" + prop.name, prop.value)
            if str(prev_prop) != str(prop.value):
                self.is_updated = True

        if len(fields) != 0:
            namespace_obj = db_loader.create_namespace(namespace,
                                                       support_regions=support_regions,
                                                       version=self.get_version())
            for field in fields:
                is_created = namespace_obj.add_field(field.name, field.type, non_zero=field.non_zero)
                assert is_created is not None
                # if field is created (not cloned from the previous db),
                # mark the plug-in as updated in order to trigger full rescan
                self.is_updated = self.is_updated or is_created
class MetricPluginMixin(object):
    """Mixin implementing declarative, regexp-driven metric collection.

    A plugin declares each metric once with declare_metric(); afterwards
    count_if_active() scans the matching markers of every code region and
    stores the resulting count as region data in the plugin's namespace.
    """

    class AliasError(Exception):
        # Raised when a pattern is requested for an alias (e.g. a parser
        # name) that was never declared and no '*' fallback exists.
        def __init__(self, alias):
            Exception.__init__(self, "Unknown pattern alias: " + str(alias))

    class PlainCounter(object):
        """Default counter: sums the number of pattern matches per marker."""

        def __init__(self, plugin, alias, data, region):
            self.plugin = plugin
            self.alias = alias
            self.data = data
            self.region = region
            self.result = 0

        def count(self, marker, pattern_to_search):
            # findall restricted to the marker's offset span within the content
            self.result += len(pattern_to_search.findall(self.data.get_content(),
                                                         marker.get_offset_begin(),
                                                         marker.get_offset_end()))

        def get_result(self):
            return self.result

    class IterIncrementCounter(PlainCounter):
        """Counter adding increment(match) for each match (default 1)."""

        def count(self, marker, pattern_to_search):
            self.marker = marker
            self.pattern_to_search = pattern_to_search
            for match in pattern_to_search.finditer(self.data.get_content(),
                                                    marker.get_offset_begin(),
                                                    marker.get_offset_end()):
                self.result += self.increment(match)

        def increment(self, match):
            return 1

    class IterAssignCounter(PlainCounter):
        """Counter replacing the result with assign(match) for each match."""

        def count(self, marker, pattern_to_search):
            self.marker = marker
            self.pattern_to_search = pattern_to_search
            for match in pattern_to_search.finditer(self.data.get_content(),
                                                    marker.get_offset_begin(),
                                                    marker.get_offset_end()):
                self.result = self.assign(match)

        def assign(self, match):
            return self.result

    def declare_metric(self, is_active, field,
                       pattern_to_search_or_map_of_patterns,
                       marker_type_mask=Marker.T.ANY,
                       region_type_mask=Region.T.ANY,
                       exclude_subregions=True,
                       merge_markers=False):
        """Register one metric.

        is_active -- store the declaration only when True (typically the
                     command-line switch enabling the metric)
        field     -- Plugin.Field descriptor for the stored value
        pattern_to_search_or_map_of_patterns -- a compiled pattern, a
                     (pattern, counter class) pair, or a dict mapping alias
                     names to either of those; '*' is the catch-all alias
        """
        if not hasattr(self, '_fields'):
            self._fields = {}

        if isinstance(pattern_to_search_or_map_of_patterns, dict):
            # copy: normalization below must not mutate the caller's dict
            map_of_patterns = dict(pattern_to_search_or_map_of_patterns)
        else:
            map_of_patterns = {'*': pattern_to_search_or_map_of_patterns}
        # client may supply a pattern or a pair of pattern + counter class
        for key in map_of_patterns:
            if not isinstance(map_of_patterns[key], tuple):
                # if it is not a pair, create a pair using default counter class
                map_of_patterns[key] = (map_of_patterns[key],
                                        MetricPluginMixin.PlainCounter)

        if is_active:
            # tuple layout is relied upon by count_if_active() below
            self._fields[field.name] = (field,
                                        marker_type_mask,
                                        exclude_subregions,
                                        merge_markers,
                                        map_of_patterns,
                                        region_type_mask)

    def is_active(self, metric_name=None):
        """Without argument: is any metric declared? Else: is the named one?"""
        if metric_name is None:
            return len(self._fields) > 0
        return metric_name in self._fields

    def get_fields(self):
        """Return the Field descriptors of all declared metrics."""
        return [declared[0] for declared in self._fields.values()]

    def callback(self, parent, data, is_updated):
        """Standard Child callback: (re)count all metrics for 'data'.

        Counting is skipped unless the file or this plugin's settings
        changed since the previous run (iterative rescan optimization).
        """
        is_updated = is_updated or self.is_updated
        if is_updated:
            for field in self.get_fields():
                self.count_if_active(field.name, data, alias=parent.get_name())
        # if parent, notify children
        if isinstance(self, Parent):
            self.notify_children(data, is_updated)

    def count_if_active(self, metric_name, data, namespace=None, alias='*'):
        """Count one declared metric for every region of 'data' and store it.

        namespace defaults to the plugin's own name; alias selects the
        pattern ('*' fallback when the specific alias was not declared).
        Raises AliasError when neither the alias nor '*' is declared.
        """
        if not self.is_active(metric_name):
            return

        if namespace is None:
            namespace = self.get_name()

        field_data = self._fields[metric_name]
        map_of_patterns = field_data[4]
        if alias not in map_of_patterns:
            if '*' not in map_of_patterns:
                raise self.AliasError(alias)
            # fall back to the catch-all declaration
            alias = '*'
        (pattern_to_search, counter_class) = map_of_patterns[alias]

        for region in data.iterate_regions(filter_group=field_data[5]):
            counter = counter_class(self, alias, data, region)
            for marker in data.iterate_markers(
                            filter_group=field_data[1],
                            region_id=region.get_id(),
                            exclude_children=field_data[2],
                            merge=field_data[3]):
                counter.count(marker, pattern_to_search)
            count = counter.get_result()
            # non_zero fields suppress storage of zero counts
            if count != 0 or not field_data[0].non_zero:
                region.set_data(namespace, metric_name, count)
class InterfaceNotImplemented(Exception):
    """Raised by interface stubs to flag a method a subclass must override."""

    def __init__(self, obj):
        # _getframe(1) is the frame of the stub that raised us, so the
        # message names the unimplemented method automatically
        method_name = sys._getframe(1).f_code.co_name
        message = ("Method '" + method_name
                   + "' has not been implemented for "
                   + str(obj.__class__))
        Exception.__init__(self, message)
class IConfigurable(object):
    """Interface for plugins configurable from the command line."""

    def configure(self, options):
        # implementers read their parsed options here
        raise InterfaceNotImplemented(self)

    def declare_configuration(self, optparser):
        # implementers register their options on the parser here
        raise InterfaceNotImplemented(self)
class IRunable(object):
    """Interface for plugins that implement an executable action."""

    def run(self, args):
        # implementers perform the action for the given arguments
        raise InterfaceNotImplemented(self)
class IParser(object):
    """Interface for plugins that parse file content."""

    def process(self, parent, data, is_updated):
        # implementers parse 'data' on behalf of 'parent'
        raise InterfaceNotImplemented(self)
class ICode(object):
    """Marker interface tagging plugins that work with code structure."""
    pass
class CallbackNotImplemented(Exception):
    """Raised when a child is notified with a callback it does not define."""

    def __init__(self, obj, callback_name):
        message = ("Callback '" + callback_name
                   + "' has not been implemented for "
                   + str(obj.__class__))
        Exception.__init__(self, message)
class Child(object):
    """Mixin for plugins that receive notifications from parent plugins."""

    def notify(self, parent, callback_name, *args):
        """Dispatch a parent's notification to the named callback method.

        Raises CallbackNotImplemented when this object has no attribute
        named callback_name.
        """
        if not hasattr(self, callback_name):
            raise CallbackNotImplemented(self, callback_name)
        # getattr is the idiomatic (and equivalent) form of __getattribute__
        getattr(self, callback_name)(parent, *args)

    def subscribe_by_parents_name(self, parent_name, callback_name='callback'):
        """Subscribe to a single parent plugin looked up by name."""
        self.get_plugin(parent_name).subscribe(self, callback_name)

    def subscribe_by_parents_names(self, parent_names, callback_name='callback'):
        """Subscribe to several parent plugins looked up by name."""
        for parent_name in parent_names:
            self.get_plugin(parent_name).subscribe(self, callback_name)

    def subscribe_by_parents_interface(self, interface, callback_name='callback'):
        """Subscribe to every loaded plugin implementing the given interface."""
        for plugin in self._get_plugin_loader().iterate_plugins():
            if isinstance(plugin, interface):
                plugin.subscribe(self, callback_name)
class Parent(object):
    """Mixin for plugins that publish notifications to subscribed children."""

    def init_Parent(self):
        # lazy initialization keeps the mixin usable without a cooperative
        # __init__ chain in the plugin class hierarchy
        if not hasattr(self, 'children'):
            self.children = []

    def subscribe(self, obj, callback_name):
        """Register (obj, callback_name); obj must be a Child instance."""
        self.init_Parent()
        if not isinstance(obj, Child):
            raise TypeError("subscriber must be a Child instance")
        self.children.append((obj, callback_name))

    def unsubscribe(self, obj, callback_name):
        """Remove a previously registered (obj, callback_name) pair."""
        self.init_Parent()
        self.children.remove((obj, callback_name))

    def notify_children(self, *args):
        """Invoke every subscribed child's callback with this parent and args."""
        self.init_Parent()
        for child in self.children:
            child[0].notify(self, child[1], *args)

    def iterate_children(self):
        """Yield the registered (child, callback_name) pairs."""
        self.init_Parent()
        for child in self.children:
            yield child
|