internal stuff hidden

avkonst 11 years ago
parent
commit
23a4c55578
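
This commit narrows the public plugin API: get_plugin_loader() / set_plugin_loader() become internal (_get_plugin_loader() / _set_plugin_loader()), plugins use the new BasePlugin.get_plugin() and get_action() shortcuts instead, the data.get_region_types() / data.get_marker_types() accessors are dropped in favour of the mpp.api.Region.T and mpp.api.Marker.T constants, and the packager classes move to mpp.internal.api_impl. The sketch below is illustrative only and is not part of the commit; it shows a hypothetical plugin written against the API as it looks after this change, using only calls that appear in the diff:

import mpp.api

class Plugin(mpp.api.Plugin, mpp.api.Child):

    def initialize(self):
        super(Plugin, self).initialize()
        # new shortcut: no explicit trip through the plugin loader
        self.db_loader = self.get_plugin('mpp.dbf').get_loader()
        self.subscribe_by_parents_interface(mpp.api.ICode, 'callback')

    def callback(self, parent, data, is_updated):
        # type constants are now referenced directly on the API classes
        for region in data.iterate_regions(filter_group=mpp.api.Region.T.FUNCTION):
            pass  # per-function processing would go here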

+ 9 - 9
mainline/ext/std/code/cpp.py

@@ -38,7 +38,7 @@ class Plugin(mpp.api.Plugin, mpp.api.Parent, mpp.api.IParser, mpp.api.IConfigura
         mpp.api.Plugin.initialize(self, properties=[
             self.Property('files', ','.join(self.files))
         ])
-        self.get_plugin_loader().get_plugin('std.tools.collect').register_parser(self.files, self)
+        self.get_plugin('std.tools.collect').register_parser(self.files, self)
         
     def process(self, parent, data, is_updated):
         is_updated = is_updated or self.is_updated
@@ -130,15 +130,15 @@ class CppCodeParser(object):
         def add_regions_rec(self, data, blocks):
             def get_type_id(data, named_type):
                 if named_type == "function":
-                    return data.get_region_types().FUNCTION
+                    return mpp.api.Region.T.FUNCTION
                 elif named_type == "class":
-                    return data.get_region_types().CLASS
+                    return mpp.api.Region.T.CLASS
                 elif named_type == "struct":
-                    return data.get_region_types().STRUCT
+                    return mpp.api.Region.T.STRUCT
                 elif named_type == "namespace":
-                    return data.get_region_types().NAMESPACE
+                    return mpp.api.Region.T.NAMESPACE
                 elif named_type == "__global__":
-                    return data.get_region_types().GLOBAL
+                    return mpp.api.Region.T.GLOBAL
                 else:
                     assert(False)
             for each in blocks:
@@ -169,15 +169,15 @@ class CppCodeParser(object):
         for m in re.finditer(self.regex_cpp, text):
             # Comment
             if text[m.start()] == '/':
-                data.add_marker(m.start(), m.end(), data.get_marker_types().COMMENT)
+                data.add_marker(m.start(), m.end(), mpp.api.Marker.T.COMMENT)
             
             # String
             elif text[m.start()] == '"' or text[m.start()] == '\'':
-                data.add_marker(m.start() + 1, m.end() - 1, data.get_marker_types().STRING)
+                data.add_marker(m.start() + 1, m.end() - 1, mpp.api.Marker.T.STRING)
             
             # Preprocessor (including internal comments)
             elif text[m.start()] == ' ' or text[m.start()] == '\t' or text[m.start()] == '#':
-                data.add_marker(m.start(), m.end(), data.get_marker_types().PREPROCESSOR)
+                data.add_marker(m.start(), m.end(), mpp.api.Marker.T.PREPROCESSOR)
 
             # Statement end
             elif text[m.start()] == ';':

+ 10 - 10
mainline/ext/std/code/cs.py

@@ -38,7 +38,7 @@ class Plugin(mpp.api.Plugin, mpp.api.Parent, mpp.api.IParser, mpp.api.IConfigura
         mpp.api.Plugin.initialize(self, properties=[
             self.Property('files', ','.join(self.files))
         ])
-        self.get_plugin_loader().get_plugin('std.tools.collect').register_parser(self.files, self)
+        self.get_plugin('std.tools.collect').register_parser(self.files, self)
         
     def process(self, parent, data, is_updated):
         is_updated = is_updated or self.is_updated
@@ -144,17 +144,17 @@ class CsCodeParser(object):
         def add_regions_rec(self, data, blocks):
             def get_type_id(data, named_type):
                 if named_type == "function":
-                    return data.get_region_types().FUNCTION
+                    return mpp.api.Region.T.FUNCTION
                 elif named_type == "class":
-                    return data.get_region_types().CLASS
+                    return mpp.api.Region.T.CLASS
                 elif named_type == "struct":
-                    return data.get_region_types().STRUCT
+                    return mpp.api.Region.T.STRUCT
                 elif named_type == "namespace":
-                    return data.get_region_types().NAMESPACE
+                    return mpp.api.Region.T.NAMESPACE
                 elif named_type == "interface":
-                    return data.get_region_types().INTERFACE
+                    return mpp.api.Region.T.INTERFACE
                 elif named_type == "__global__":
-                    return data.get_region_types().GLOBAL
+                    return mpp.api.Region.T.GLOBAL
                 else:
                     assert(False)
             for each in blocks:
@@ -185,15 +185,15 @@ class CsCodeParser(object):
         for m in re.finditer(self.regex_cpp, text):
             # Comment
             if text[m.start()] == '/':
-                data.add_marker(m.start(), m.end(), data.get_marker_types().COMMENT)
+                data.add_marker(m.start(), m.end(), mpp.api.Marker.T.COMMENT)
             
             # String
             elif text[m.start()] == '"' or text[m.start()] == '\'':
-                data.add_marker(m.start() + 1, m.end() - 1, data.get_marker_types().STRING)
+                data.add_marker(m.start() + 1, m.end() - 1, mpp.api.Marker.T.STRING)
             
             # Preprocessor (including internal comments)
             elif text[m.start()] == ' ' or text[m.start()] == '\t' or text[m.start()] == '#':
-                data.add_marker(m.start(), m.end(), data.get_marker_types().PREPROCESSOR)
+                data.add_marker(m.start(), m.end(), mpp.api.Marker.T.PREPROCESSOR)
 
             # Statement end
             elif text[m.start()] == ';':

+ 7 - 7
mainline/ext/std/code/java.py

@@ -38,7 +38,7 @@ class Plugin(mpp.api.Plugin, mpp.api.Parent, mpp.api.IParser, mpp.api.IConfigura
         mpp.api.Plugin.initialize(self, properties=[
             self.Property('files', ','.join(self.files))
         ])
-        self.get_plugin_loader().get_plugin('std.tools.collect').register_parser(self.files, self)
+        self.get_plugin('std.tools.collect').register_parser(self.files, self)
         
     def process(self, parent, data, is_updated):
         is_updated = is_updated or self.is_updated
@@ -124,13 +124,13 @@ class JavaCodeParser(object):
         def add_regions_rec(self, data, blocks):
             def get_type_id(data, named_type):
                 if named_type == "function":
-                    return data.get_region_types().FUNCTION
+                    return mpp.api.Region.T.FUNCTION
                 elif named_type == "class":
-                    return data.get_region_types().CLASS
+                    return mpp.api.Region.T.CLASS
                 elif named_type == "interface":
-                    return data.get_region_types().INTERFACE
+                    return mpp.api.Region.T.INTERFACE
                 elif named_type == "__global__":
-                    return data.get_region_types().GLOBAL
+                    return mpp.api.Region.T.GLOBAL
                 else:
                     assert(False)
             for each in blocks:
@@ -161,11 +161,11 @@ class JavaCodeParser(object):
         for m in re.finditer(self.regex_cpp, text):
             # Comment
             if text[m.start()] == '/':
-                data.add_marker(m.start(), m.end(), data.get_marker_types().COMMENT)
+                data.add_marker(m.start(), m.end(), mpp.api.Marker.T.COMMENT)
             
             # String
             elif text[m.start()] == '"' or text[m.start()] == '\'':
-                data.add_marker(m.start() + 1, m.end() - 1, data.get_marker_types().STRING)
+                data.add_marker(m.start() + 1, m.end() - 1, mpp.api.Marker.T.STRING)
             
             # Statement end
             elif text[m.start()] == ';':

+ 5 - 5
mainline/ext/std/code/test.py

@@ -35,9 +35,9 @@ class Plugin(mpp.api.Plugin, mpp.api.Child):
         for region in data.iterate_regions():
             logging.warn(region.get_name() + " " + str(region.get_cursor()))
             for marker in data.iterate_markers(region_id=region.get_id(),
-                                               filter_group = data.get_marker_types().ANY,
+                                               filter_group = mpp.api.Marker.T.ANY,
                                                exclude_children = True):
-                logging.warn("\tMarker: " + data.get_marker_types()().to_str(marker.get_type()) +
+                logging.warn("\tMarker: " + mpp.api.Marker.T().to_str(marker.get_type()) +
                              " " + str(marker.get_offset_begin()) + " " + str(marker.get_offset_end()) +
                              " >>>" + text[marker.get_offset_begin():marker.get_offset_end()] + "<<<")
                 text_comb += text[marker.get_offset_begin():marker.get_offset_end()]
@@ -45,9 +45,9 @@ class Plugin(mpp.api.Plugin, mpp.api.Child):
 
         text_comb = ""
         for marker in data.iterate_markers(region_id=1,
-                                           filter_group = data.get_marker_types().ANY,
+                                           filter_group = mpp.api.Marker.T.ANY,
                                            exclude_children = False):
-            logging.warn("\tMarker: " + data.get_marker_types()().to_str(marker.get_type()) +
+            logging.warn("\tMarker: " + mpp.api.Marker.T().to_str(marker.get_type()) +
                          " " + str(marker.get_offset_begin()) + " " + str(marker.get_offset_end()) +
                          " >>>" + text[marker.get_offset_begin():marker.get_offset_end()] + "<<<")
             text_comb += text[marker.get_offset_begin():marker.get_offset_end()]
@@ -57,7 +57,7 @@ class Plugin(mpp.api.Plugin, mpp.api.Child):
         for region in data.iterate_regions():
             logging.warn(region.get_name() + " " + str(region.get_cursor()))
             for marker in data.iterate_markers(region_id=region.get_id(),
-                                               filter_group = data.get_marker_types().ANY,
+                                               filter_group = mpp.api.Marker.T.ANY,
                                                exclude_children = True,
                                                merge = True):
                 logging.warn("\tMarker: merged" + 

+ 3 - 3
mainline/ext/std/suppress.py

@@ -66,7 +66,7 @@ class Plugin(mpp.api.Plugin, mpp.api.Child, mpp.api.IConfigurable):
                 list_text = []
                 last_comment_end = None
                 for marker in data.iterate_markers(
-                                filter_group = data.get_marker_types().COMMENT,
+                                filter_group = mpp.api.Marker.T.COMMENT,
                                 region_id = region.get_id(),
                                 exclude_children = True):
                     
@@ -79,9 +79,9 @@ class Plugin(mpp.api.Plugin, mpp.api.Child, mpp.api.IConfigurable):
                     matches = self.pattern.findall(text, marker.get_offset_begin(), marker.get_offset_end())
                     for m in matches:
                         namespace_name, field = m.split(':')
-                        db_loader = self.get_plugin_loader().get_plugin('mpp.dbf').get_loader()
+                        db_loader = self.get_plugin('mpp.dbf').get_loader()
                         namespace = db_loader.get_namespace(namespace_name)
-                        if namespace == None or namespace.get_field_packager(field) == None:
+                        if namespace == None or namespace.check_field(field) == False:
                             mpp.cout.notify(data.get_path(), region.get_cursor(),
                                                   mpp.cout.SEVERITY_WARNING,
                                                   "Suppressed metric '" + namespace_name + ":" + field +

+ 3 - 3
mainline/ext/std/tools/collect.py

@@ -67,8 +67,8 @@ class Plugin(mpp.api.Plugin, mpp.api.Parent, mpp.api.IConfigurable, mpp.api.IRun
         if self.is_size_enabled == True:
             fields.append(self.Field('size', int))
         super(Plugin, self).initialize(namespace='std.general', support_regions=False, fields=fields)
-        self.add_exclude_file(self.get_plugin_loader().get_plugin('mpp.dbf').get_dbfile_path())
-        self.add_exclude_file(self.get_plugin_loader().get_plugin('mpp.dbf').get_dbfile_prev_path())
+        self.add_exclude_file(self.get_plugin('mpp.dbf').get_dbfile_path())
+        self.add_exclude_file(self.get_plugin('mpp.dbf').get_dbfile_prev_path())
         
     def run(self, args):
         if len(args) == 0:
@@ -131,7 +131,7 @@ class DirectoryReader():
                         f.close()
                         checksum = binascii.crc32(text) & 0xffffffff # to match python 3
                         
-                        db_loader = plugin.get_plugin_loader().get_plugin('mpp.dbf').get_loader()
+                        db_loader = plugin.get_plugin('mpp.dbf').get_loader()
                         (data, is_updated) = db_loader.create_file_data(norm_path, checksum, text)
                         procerrors = parser.process(plugin, data, is_updated)
                         if plugin.is_proctime_enabled == True:

+ 4 - 4
mainline/ext/std/tools/debug.py

@@ -34,7 +34,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
         self.mode = options.__dict__['mode']
 
     def run(self, args):
-        loader = self.get_plugin_loader().get_plugin('mpp.dbf').get_loader()
+        loader = self.get_plugin('mpp.dbf').get_loader()
     
         if self.mode == 'dumphtml':
             return dumphtml(args, loader)
@@ -68,11 +68,11 @@ def dumphtml(args, loader):
                                            mpp.api.Marker.T.STRING |
                                            mpp.api.Marker.T.PREPROCESSOR):
             result += (cgi.escape(text[last_pos:marker.begin]))
-            if marker.get_type() == data.get_marker_types().STRING:
+            if marker.get_type() == mpp.api.Marker.T.STRING:
                 result += ('<span style="color:#0000FF">')
-            elif marker.get_type() == data.get_marker_types().COMMENT:
+            elif marker.get_type() == mpp.api.Marker.T.COMMENT:
                 result += ('<span style="color:#009900">')
-            elif marker.get_type() == data.get_marker_types().PREPROCESSOR:
+            elif marker.get_type() == mpp.api.Marker.T.PREPROCESSOR:
                 result += ('<span style="color:#990000">')
             else:
                 assert False, "Uknown marker type"

+ 2 - 2
mainline/ext/std/tools/export.py

@@ -26,8 +26,8 @@ import csv
 class Plugin(mpp.api.Plugin, mpp.api.IRunable):
 
     def run(self, args):
-        self.loader_prev = self.get_plugin_loader().get_plugin('mpp.dbf').get_loader_prev()
-        self.loader = self.get_plugin_loader().get_plugin('mpp.dbf').get_loader()
+        self.loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
+        self.loader = self.get_plugin('mpp.dbf').get_loader()
     
         paths = None
         if len(args) == 0:

+ 6 - 6
mainline/ext/std/tools/info.py

@@ -29,8 +29,8 @@ class Plugin(mpp.api.Plugin, mpp.api.IRunable):
     def run(self, args):
         exit_code = 0
     
-        loader_prev = self.get_plugin_loader().get_plugin('mpp.dbf').get_loader_prev(none_if_empty=True)
-        loader = self.get_plugin_loader().get_plugin('mpp.dbf').get_loader()
+        loader_prev = self.get_plugin('mpp.dbf').get_loader_prev(none_if_empty=True)
+        loader = self.get_plugin('mpp.dbf').get_loader()
     
         details = []
         for each in loader.iterate_properties():
@@ -42,7 +42,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IRunable):
                 elif prev != each.value:
                     prev_value_str = " [modified (was: " + loader_prev.get_property(each.name) + ")]"
             details.append((each.name, each.value + prev_value_str))
-        path = self.get_plugin_loader().get_plugin('mpp.dbf').get_dbfile_path()
+        path = self.get_plugin('mpp.dbf').get_dbfile_path()
         if ('METRIXPLUSPLUS_TEST_MODE' in os.environ.keys() and
              os.environ['METRIXPLUSPLUS_TEST_MODE'] == "True"):
             # in tests, paths come as full paths, strip it for consistent gold files
@@ -55,11 +55,11 @@ class Plugin(mpp.api.Plugin, mpp.api.IRunable):
             for field in sorted(loader.get_namespace(each).iterate_field_names()):
                 prev_value_str = ""
                 if loader_prev != None:
-                    prev = None
+                    prev = False
                     prev_namespace = loader_prev.get_namespace(each)
                     if prev_namespace != None:
-                        prev = prev_namespace.get_field_packager(field)
-                    if prev == None:
+                        prev = prev_namespace.check_field(field)
+                    if prev == False:
                         prev_value_str = " [new]"
                 details.append((each + ':' + field,  prev_value_str))
         mpp.cout.notify(path, '', mpp.cout.SEVERITY_INFO, 'Collected metrics:', details)

+ 3 - 3
mainline/ext/std/tools/limit.py

@@ -105,7 +105,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
 
     def initialize(self):
         super(Plugin, self).initialize()
-        db_loader = self.get_plugin_loader().get_plugin('mpp.dbf').get_loader()
+        db_loader = self.get_plugin('mpp.dbf').get_loader()
         self._verify_namespaces(db_loader.iterate_namespace_names())
         for each in db_loader.iterate_namespace_names():
             self._verify_fields(each, db_loader.get_namespace(each).iterate_field_names())
@@ -155,8 +155,8 @@ def main(plugin, args):
     
     exit_code = 0
 
-    loader_prev = plugin.get_plugin_loader().get_plugin('mpp.dbf').get_loader_prev()
-    loader = plugin.get_plugin_loader().get_plugin('mpp.dbf').get_loader()
+    loader_prev = plugin.get_plugin('mpp.dbf').get_loader_prev()
+    loader = plugin.get_plugin('mpp.dbf').get_loader()
     
     paths = None
     if len(args) == 0:

+ 7 - 7
mainline/ext/std/tools/view.py

@@ -64,8 +64,8 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
             self.parser.error("option --scope-mode: The mode '" + options.__dict__['scope_mode'] + "' requires '--db-file-prev' option set")
 
     def run(self, args):
-        loader_prev = self.get_plugin_loader().get_plugin('mpp.dbf').get_loader_prev()
-        loader = self.get_plugin_loader().get_plugin('mpp.dbf').get_loader()
+        loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
+        loader = self.get_plugin('mpp.dbf').get_loader()
     
         paths = None
         if len(args) == 0:
@@ -161,12 +161,12 @@ def load_aggregated_data_with_mode(loader, loader_prev, path, mode):
                 for name in loader.iterate_namespace_names():
                     namespace = loader.get_namespace(name)
                     for field in namespace.iterate_field_names():
-                        if namespace.get_field_packager(field).get_python_type() == str:
+                        if namespace.get_field_python_type(field) == str:
                             # skip string type fields
                             continue
                         self.set_data(name, field, {
                             'count': 0,
-                            'nonzero': namespace.get_field_packager(field).is_non_zero(),
+                            'nonzero': namespace.is_field_non_zero(field),
                             'min': None,
                             'max': None,
                             'total': 0.0,
@@ -181,7 +181,7 @@ def load_aggregated_data_with_mode(loader, loader_prev, path, mode):
                 for name in loader.iterate_namespace_names():
                     namespace = loader.get_namespace(name)
                     for field in namespace.iterate_field_names():
-                        if namespace.get_field_packager(field).get_python_type() == str:
+                        if namespace.get_field_python_type(field) == str:
                             # skip string type fields
                             continue
                         data = self.get_data(name, field)
@@ -312,7 +312,7 @@ def append_regions(file_data_tree, file_data, file_data_prev, nest_regions):
                                                region_data_prev.get_data_tree())
                 is_modified = regions_matcher.is_modified(region.get_id())
             regions.append({"info": {"name" : region.name,
-                                     'type': file_data.get_region_types()().to_str(region.get_type()),
+                                     'type': mpp.api.Region.T().to_str(region.get_type()),
                                      'modified': is_modified,
                                      'cursor' : region.cursor,
                                      'line_begin': region.line_begin,
@@ -332,7 +332,7 @@ def append_regions(file_data_tree, file_data, file_data_prev, nest_regions):
                                                region_data_prev.get_data_tree())
                 is_modified = regions_matcher.is_modified(region.get_id())
             result = {"info": {"name" : region.name,
-                               'type' : file_data.get_region_types()().to_str(region.get_type()),
+                               'type' : mpp.api.Region.T().to_str(region.get_type()),
                                'modified': is_modified,
                                'cursor' : region.cursor,
                                'line_begin': region.line_begin,

+ 57 - 166
mainline/mpp/api.py

@@ -20,6 +20,7 @@
 import os.path
 
 import mpp.internal.dbwrap
+import mpp.internal.api_impl
 
 ##############################################################################
 #
@@ -81,8 +82,9 @@ class LoadableData(Data):
         if row == None:
             return
         for column_name in row.keys():
-            packager = namespace_obj.get_field_packager(column_name)
-            if packager == None:
+            try:
+                packager = namespace_obj._get_field_packager(column_name)
+            except mpp.internal.api_impl.PackagerError:
                 continue
             if row[column_name] == None:
                 continue
@@ -182,12 +184,12 @@ class Region(LoadableData):
     def get_checksum(self):
         return self.checksum
     
-    def register_subregion_id(self, child_id):
-        self.children.append(child_id)
-
     def iterate_subregion_ids(self):
         return self.children
 
+    def _register_subregion_id(self, child_id):
+        self.children.append(child_id)
+
 class Marker(object):
     class T(object):
         NONE            = 0x00
@@ -262,7 +264,7 @@ class FileData(LoadableData):
                 break
         self.loading_tmp.append(region.get_id())
         if prev_id != None:
-            self.get_region(prev_id).register_subregion_id(region.get_id())
+            self.get_region(prev_id)._register_subregion_id(region.get_id())
         self.regions.append(region)
 
     def load_regions(self):
@@ -294,9 +296,6 @@ class FileData(LoadableData):
         self.load_regions()
         return self.regions[region_id - 1]
     
-    def get_region_types(self):
-        return Region.T
-
     def iterate_regions(self, filter_group = Region.T.ANY):
         self.load_regions()
         for each in self.regions:
@@ -455,9 +454,6 @@ class FileData(LoadableData):
                     if Marker.T.CODE & filter_group and next_code_marker_start < region.get_offset_end():
                         yield Marker(next_code_marker_start, region.get_offset_end(), Marker.T.CODE)
 
-    def get_marker_types(self):
-        return Marker.T
-
     def are_markers_loaded(self):
         return self.markers != None
 
@@ -544,141 +540,7 @@ class DiffData(Data):
             old_data = 0
         return new_data - old_data
 
-####################################
-# Packager Interface
-####################################
-
-class PackagerError(Exception):
-    def __init__(self):
-        Exception.__init__(self, "Failed to pack or unpack.")
-
-class PackagerFactory(object):
-
-    def create(self, python_type, non_zero):
-        if python_type == None:
-            return PackagerFactory.SkipPackager()
-        if python_type == int:
-            if non_zero == False:
-                return PackagerFactory.IntPackager()
-            else:
-                return PackagerFactory.IntNonZeroPackager()
-        if python_type == float and non_zero == False:
-            return PackagerFactory.FloatPackager()
-        if python_type == str:
-            return PackagerFactory.StringPackager()
-        
-        class PackagerFactoryError(Exception):
-            def __init__(self, python_type):
-                Exception.__init__(self, "Python type '" + str(python_type) + "' is not supported by the factory.")
-        raise PackagerFactoryError(python_type)
-    
-    def get_python_type(self, sql_type):
-        if sql_type == "integer":
-            return int
-        if sql_type == "real":
-            return float
-        if sql_type == "text":
-            return str
-
-        class PackagerFactoryError(Exception):
-            def __init__(self, sql_type):
-                Exception.__init__(self, "SQL type '" + str(sql_type) + "' is not supported by the factory.")
-        raise PackagerFactoryError(sql_type)
-
-    class IPackager(object):
-        def pack(self, unpacked_data):
-            raise mpp.api.InterfaceNotImplemented(self)
-        def unpack(self, packed_data):
-            raise mpp.api.InterfaceNotImplemented(self)
-        def get_sql_type(self):
-            raise mpp.api.InterfaceNotImplemented(self)
-        def get_python_type(self):
-            raise mpp.api.InterfaceNotImplemented(self)
-        def is_non_zero(self):
-            return False
-        
-    class IntPackager(IPackager):
-        def pack(self, unpacked_data):
-            if not isinstance(unpacked_data, int):
-                raise PackagerError()
-            return str(unpacked_data)
-            
-        def unpack(self, packed_data): 
-            try:
-                return int(packed_data)
-            except ValueError:
-                raise PackagerError()
-    
-        def get_sql_type(self):
-            return "integer"
-        
-        def get_python_type(self):
-            return int
-    
-    class IntNonZeroPackager(IntPackager):
-        def pack(self, unpacked_data):
-            if unpacked_data == 0:
-                raise PackagerError()
-            return PackagerFactory.IntPackager.pack(self, unpacked_data)
-        def is_non_zero(self):
-            return True
-
-    class FloatPackager(IPackager):
-        def pack(self, unpacked_data):
-            if not isinstance(unpacked_data, float):
-                raise PackagerError()
-            return str(unpacked_data)
-            
-        def unpack(self, packed_data): 
-            try:
-                return float(packed_data)
-            except ValueError:
-                raise PackagerError()
-    
-        def get_sql_type(self):
-            return "real"
-
-        def get_python_type(self):
-            return float
-
-    class FloatNonZeroPackager(FloatPackager):
-        def pack(self, unpacked_data):
-            if unpacked_data == 0:
-                raise PackagerError()
-            return PackagerFactory.FloatPackager.pack(self, unpacked_data)
-        def is_non_zero(self):
-            return True
-
-    class StringPackager(IPackager):
-        def pack(self, unpacked_data):
-            if not isinstance(unpacked_data, str):
-                raise PackagerError()
-            return str(unpacked_data)
-            
-        def unpack(self, packed_data): 
-            try:
-                return str(packed_data)
-            except ValueError:
-                raise PackagerError()
-    
-        def get_sql_type(self):
-            return "text"
 
-        def get_python_type(self):
-            return str
-    
-    class SkipPackager(IPackager):
-        def pack(self, unpacked_data):
-            return None
-            
-        def unpack(self, packed_data): 
-            return None
-    
-        def get_sql_type(self):
-            return None
-            
-        def get_python_type(self):
-            return None
             
 ####################################
 # Loader
@@ -714,7 +576,9 @@ class Namespace(object):
             self.db.create_table(name, support_regions, version)
         else:
             for column in self.db.iterate_columns(name):
-                self.add_field(column.name, PackagerFactory().get_python_type(column.sql_type), non_zero=column.non_zero)
+                self.add_field(column.name,
+                               mpp.internal.api_impl.PackagerFactory().get_python_type(column.sql_type),
+                               non_zero=column.non_zero)
         
     def get_name(self):
         return self.name
@@ -725,7 +589,7 @@ class Namespace(object):
     def add_field(self, field_name, python_type, non_zero=False):
         if not isinstance(field_name, str):
             raise FieldError(field_name, "field_name not a string")
-        packager = PackagerFactory().create(python_type, non_zero)
+        packager = mpp.internal.api_impl.PackagerFactory().create(python_type, non_zero)
         if field_name in self.fields.keys():
             raise FieldError(field_name, "double used")
         self.fields[field_name] = packager
@@ -740,17 +604,37 @@ class Namespace(object):
         for name in self.fields.keys():
             yield name
     
-    def get_field_packager(self, field_name):
-        if field_name in self.fields.keys():
-            return self.fields[field_name]
-        else:
-            return None
-        
+    def check_field(self, field_name):
+        try:
+            self._get_field_packager(field_name)
+        except mpp.internal.api_impl.PackagerError:
+            return False
+        return True
+
     def get_field_sql_type(self, field_name):
-        return self.get_field_packager(field_name).get_sql_type()
+        try:
+            return self._get_field_packager(field_name).get_sql_type()
+        except mpp.internal.api_impl.PackagerError:
+            raise FieldError(field_name, 'does not exist')
 
     def get_field_python_type(self, field_name):
-        return self.get_field_packager(field_name).get_python_type()
+        try:
+            return self._get_field_packager(field_name).get_python_type()
+        except mpp.internal.api_impl.PackagerError:
+            raise FieldError(field_name, 'does not exist')
+
+
+    def is_field_non_zero(self, field_name):
+        try:
+            return self._get_field_packager(field_name).is_non_zero()
+        except mpp.internal.api_impl.PackagerError:
+            raise FieldError(field_name, 'does not exist')
+
+    def _get_field_packager(self, field_name):
+        if field_name in self.fields.keys():
+            return self.fields[field_name]
+        else:
+            raise mpp.internal.api_impl.PackagerError("unknown field " + field_name + " requested")
     
 class DataNotPackable(Exception):
     def __init__(self, namespace, field, value, packager, extra_message):
@@ -866,8 +750,9 @@ class Loader(object):
                     if space == None:
                         raise DataNotPackable(namespace, each[0], each[1], None, "The namespace has not been found")
                     
-                    packager = space.get_field_packager(each[0])
-                    if packager == None:
+                    try:
+                        packager = space._get_field_packager(each[0])
+                    except mpp.internal.api_impl.PackagerError:
                         raise DataNotPackable(namespace, each[0], each[1], None, "The field has not been found")
         
                     if space.support_regions != support_regions:
@@ -877,7 +762,7 @@ class Loader(object):
                         packed_data = packager.pack(each[1])
                         if packed_data == None:
                             continue
-                    except PackagerError:
+                    except mpp.internal.api_impl.PackagerError:
                         raise DataNotPackable(namespace, each[0], each[1], packager, "Packager raised exception")
                     
                     yield (each[0], packed_data)
@@ -950,9 +835,9 @@ class Loader(object):
             namespace = self.get_namespace(name)
             data = self.db.aggregate_rows(name, path_like = final_path_like)
             for field in data.keys():
-                if namespace.get_field_packager(field).get_python_type() == str:
+                if namespace.get_field_python_type(field) == str:
                     continue
-                data[field]['nonzero'] = namespace.get_field_packager(field).is_non_zero()
+                data[field]['nonzero'] = namespace.is_field_non_zero(field)
                 distribution = self.db.count_rows(name, path_like = final_path_like, group_by_column = field)
                 data[field]['distribution-bars'] = []
                 for each in distribution:
@@ -1032,13 +917,19 @@ class BasePlugin(object):
             return None
         return self.version
 
-    def set_plugin_loader(self, loader):
+    def _set_plugin_loader(self, loader):
         self.plugin_loader = loader
 
-    def get_plugin_loader(self):
+    def _get_plugin_loader(self):
         if hasattr(self, 'plugin_loader') == False:
             return None
         return self.plugin_loader
+    
+    def get_plugin(self, plugin_name):
+        return self._get_plugin_loader().get_plugin(plugin_name)
+
+    def get_action(self):
+        return self._get_plugin_loader().get_action()
 
 class Plugin(BasePlugin):
 
@@ -1059,7 +950,7 @@ class Plugin(BasePlugin):
         if hasattr(self, 'is_updated') == False:
             self.is_updated = False # original initialization
 
-        db_loader = self.get_plugin_loader().get_plugin('mpp.dbf').get_loader()
+        db_loader = self.get_plugin('mpp.dbf').get_loader()
 
         if namespace == None:
             namespace = self.get_name()
@@ -1208,10 +1099,10 @@ class Child(object):
         self.__getattribute__(callback_name)(parent, *args)
 
     def subscribe_by_parents_name(self, parent_name, callback_name='callback'):
-        self.get_plugin_loader().get_plugin(parent_name).subscribe(self, callback_name)
+        self.get_plugin(parent_name).subscribe(self, callback_name)
     
     def subscribe_by_parents_interface(self, interface, callback_name='callback'):
-        for plugin in self.get_plugin_loader().iterate_plugins():
+        for plugin in self._get_plugin_loader().iterate_plugins():
             if isinstance(plugin, interface):
                 plugin.subscribe(self, callback_name)
 

+ 2 - 2
mainline/mpp/dbf.py

@@ -25,7 +25,7 @@ import logging
 class Plugin(mpp.api.Plugin, mpp.api.IConfigurable):
     
     def declare_configuration(self, parser):
-        if self.get_plugin_loader().get_action() == 'collect':
+        if self.get_action() == 'collect':
             dbfile_help = "Path to a database file to create and write [default: %default]."
             dbfile_prev_help = ("Path to database file with data collected for the past/previous code revision."
                              " If it is set, the tool will do an incremental/iterative collection."
@@ -50,7 +50,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable):
         
     def initialize(self):
         
-        if self.get_plugin_loader().get_action() == 'collect':
+        if self.get_action() == 'collect':
             if os.path.exists(self.dbfile):
                 logging.warn("Removing existing file: " + self.dbfile)
                 try:

+ 153 - 0
mainline/mpp/internal/api_impl.py

@@ -0,0 +1,153 @@
+#
+#    Metrix++, Copyright 2009-2013, Metrix++ Project
+#    Link: http://metrixplusplus.sourceforge.net
+#    
+#    This file is a part of Metrix++ Tool.
+#    
+#    Metrix++ is free software: you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation, version 3 of the License.
+#    
+#    Metrix++ is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+#    GNU General Public License for more details.
+#    
+#    You should have received a copy of the GNU General Public License
+#    along with Metrix++.  If not, see <http://www.gnu.org/licenses/>.
+#
+
+class PackagerError(Exception):
+    def __init__(self, message=None):
+        if message == None:
+            Exception.__init__(self, "Failed to pack or unpack.")
+        else:
+            Exception.__init__(self, message)
+
+class PackagerFactory(object):
+
+    def create(self, python_type, non_zero):
+        if python_type == None:
+            return PackagerFactory.SkipPackager()
+        if python_type == int:
+            if non_zero == False:
+                return PackagerFactory.IntPackager()
+            else:
+                return PackagerFactory.IntNonZeroPackager()
+        if python_type == float and non_zero == False:
+            return PackagerFactory.FloatPackager()
+        if python_type == str:
+            return PackagerFactory.StringPackager()
+        
+        class PackagerFactoryError(Exception):
+            def __init__(self, python_type):
+                Exception.__init__(self, "Python type '" + str(python_type) + "' is not supported by the factory.")
+        raise PackagerFactoryError(python_type)
+    
+    def get_python_type(self, sql_type):
+        if sql_type == "integer":
+            return int
+        if sql_type == "real":
+            return float
+        if sql_type == "text":
+            return str
+
+        class PackagerFactoryError(Exception):
+            def __init__(self, sql_type):
+                Exception.__init__(self, "SQL type '" + str(sql_type) + "' is not supported by the factory.")
+        raise PackagerFactoryError(sql_type)
+
+    class IPackager(object):
+        def pack(self, unpacked_data):
+            assert False, "Internal interface not implemented"
+        def unpack(self, packed_data):
+            assert False, "Internal interface not implemented"
+        def get_sql_type(self):
+            assert False, "Internal interface not implemented"
+        def get_python_type(self):
+            assert False, "Internal interface not implemented"
+        def is_non_zero(self):
+            return False
+        
+    class IntPackager(IPackager):
+        def pack(self, unpacked_data):
+            if not isinstance(unpacked_data, int):
+                raise PackagerError()
+            return str(unpacked_data)
+            
+        def unpack(self, packed_data): 
+            try:
+                return int(packed_data)
+            except ValueError:
+                raise PackagerError()
+    
+        def get_sql_type(self):
+            return "integer"
+        
+        def get_python_type(self):
+            return int
+    
+    class IntNonZeroPackager(IntPackager):
+        def pack(self, unpacked_data):
+            if unpacked_data == 0:
+                raise PackagerError()
+            return PackagerFactory.IntPackager.pack(self, unpacked_data)
+        def is_non_zero(self):
+            return True
+
+    class FloatPackager(IPackager):
+        def pack(self, unpacked_data):
+            if not isinstance(unpacked_data, float):
+                raise PackagerError()
+            return str(unpacked_data)
+            
+        def unpack(self, packed_data): 
+            try:
+                return float(packed_data)
+            except ValueError:
+                raise PackagerError()
+    
+        def get_sql_type(self):
+            return "real"
+
+        def get_python_type(self):
+            return float
+
+    class FloatNonZeroPackager(FloatPackager):
+        def pack(self, unpacked_data):
+            if unpacked_data == 0:
+                raise PackagerError()
+            return PackagerFactory.FloatPackager.pack(self, unpacked_data)
+        def is_non_zero(self):
+            return True
+
+    class StringPackager(IPackager):
+        def pack(self, unpacked_data):
+            if not isinstance(unpacked_data, str):
+                raise PackagerError()
+            return str(unpacked_data)
+            
+        def unpack(self, packed_data): 
+            try:
+                return str(packed_data)
+            except ValueError:
+                raise PackagerError()
+    
+        def get_sql_type(self):
+            return "text"
+
+        def get_python_type(self):
+            return str
+    
+    class SkipPackager(IPackager):
+        def pack(self, unpacked_data):
+            return None
+            
+        def unpack(self, packed_data): 
+            return None
+    
+        def get_sql_type(self):
+            return None
+            
+        def get_python_type(self):
+            return None

+ 1 - 1
mainline/mpp/internal/loader.py

@@ -148,7 +148,7 @@ class Loader(object):
             item['instance'].__init__()
             item['instance'].set_name(item['package'] + "." + item['module'])
             item['instance'].set_version(item['version'])
-            item['instance'].set_plugin_loader(self)
+            item['instance']._set_plugin_loader(self)
             self.plugins.append(item)
             self.hash[plugin_name] = item
 

+ 1 - 1
mainline/tests/system/test_api_tutorial/ext/step3/myext/magic.py

@@ -33,5 +33,5 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.Child):
             self.subscribe_by_parents_interface(mpp.api.ICode, 'callback')
 
     def callback(self, parent, data, is_updated):
-        print parent, data.get_path(), is_updated
+        print parent.get_name(), data.get_path(), is_updated
         

+ 1 - 1
mainline/tests/system/test_api_tutorial/test_basic_collect_step3_stdout.gold.txt

@@ -1 +1 @@
-<std.code.cpp.Plugin object at 0x0000000002746B70> ./test.cpp True
+std.code.cpp ./test.cpp True