
Merge branch 'master' into feature/#11-add-report-plugin

prozessorkern 1 year ago
commit a0c21cc58e
100 changed files with 16387 additions and 522 deletions
  1. 49 0
      .github/workflows/gh-pages.yml
  2. 31 0
      .github/workflows/python-package.yml
  3. 32 0
      .github/workflows/python-tests.yml
  4. 1 0
      .gitignore
  5. 18 0
      CHANGELOG.md
  6. 7 0
      Dockerfile
  7. 1 1
      LICENSE
  8. 13 2
      README.md
  9. 20 0
      docs/.gitignore
  10. 33 0
      docs/README.md
  11. 3 0
      docs/babel.config.js
  12. 7 0
      docs/docs/01-d-file.md
  13. 169 0
      docs/docs/01-u-overview.md
  14. 21 0
      docs/docs/02-u-download-install.md
  15. 26 0
      docs/docs/03-u-getting-started.md
  16. 506 0
      docs/docs/04-u-workflow.md
  17. 642 0
      docs/docs/05-u-extending-tool.md
  18. 16 0
      docs/docs/06-u-feeback-contribute.md
  19. 107 0
      docs/docusaurus.config.js
  20. 13177 0
      docs/package-lock.json
  21. 30 0
      docs/package.json
  22. 15 0
      docs/sidebars.js
  23. 35 0
      docs/src/css/custom.css
  24. BIN
      docs/src/img/piechart.png
  25. 127 0
      docs/src/pages/index.js
  26. 37 0
      docs/src/pages/styles.module.css
  27. 0 0
      docs/static/.nojekyll
  28. 1 0
      docs/static/img/analytics-24px.svg
  29. 1 0
      docs/static/img/bolt-24px.svg
  30. 1 0
      docs/static/img/done_all-24px.svg
  31. 1 0
      docs/static/img/extension-24px.svg
  32. BIN
      docs/static/img/favicon.png
  33. 1 0
      docs/static/img/perm_data_setting-24px.svg
  34. 1 0
      docs/static/img/speed-24px.svg
  35. 170 0
      docs/static/img/undraw_docusaurus_mountain.svg
  36. 169 0
      docs/static/img/undraw_docusaurus_react.svg
  37. 1 0
      docs/static/img/undraw_docusaurus_tree.svg
  38. 0 189
      ext/std/tools/collect.py
  39. 2 2
      metrix++.py
  40. 0 0
      metrixpp/__init__.py
  41. 0 0
      metrixpp/ext/__init__.py
  42. 0 0
      metrixpp/ext/std/__init__.py
  43. 0 0
      metrixpp/ext/std/code/__init__.py
  44. 0 0
      metrixpp/ext/std/code/complexity.ini
  45. 7 7
      ext/std/code/complexity.py
  46. 0 0
      metrixpp/ext/std/code/cpp.ini
  47. 20 20
      ext/std/code/cpp.py
  48. 0 0
      metrixpp/ext/std/code/cs.ini
  49. 19 19
      ext/std/code/cs.py
  50. 1 1
      ext/std/code/debug.ini
  51. 12 12
      ext/std/code/debug.py
  52. 0 0
      metrixpp/ext/std/code/filelines.ini
  53. 7 7
      ext/std/code/filelines.py
  54. 0 0
      metrixpp/ext/std/code/java.ini
  55. 16 16
      ext/std/code/java.py
  56. 0 0
      metrixpp/ext/std/code/length.ini
  57. 4 4
      ext/std/code/length.py
  58. 0 0
      metrixpp/ext/std/code/lines.ini
  59. 7 7
      ext/std/code/lines.py
  60. 15 0
      metrixpp/ext/std/code/longlines.ini
  61. 42 0
      metrixpp/ext/std/code/longlines.py
  62. 2 2
      ext/std/code/magic.ini
  63. 38 14
      ext/std/code/magic.py
  64. 0 0
      metrixpp/ext/std/code/member.ini
  65. 25 25
      ext/std/code/member.py
  66. 0 0
      metrixpp/ext/std/code/mi.ini
  67. 8 8
      ext/std/code/mi.py
  68. 15 0
      metrixpp/ext/std/code/ratio.ini
  69. 49 0
      metrixpp/ext/std/code/ratio.py
  70. 0 0
      metrixpp/ext/std/code/test.ini
  71. 8 8
      ext/std/code/test.py
  72. 0 0
      metrixpp/ext/std/code/todo.ini
  73. 10 10
      ext/std/code/todo.py
  74. 0 0
      metrixpp/ext/std/suppress.ini
  75. 16 16
      ext/std/suppress.py
  76. 0 0
      metrixpp/ext/std/tools/__init__.py
  77. 0 0
      metrixpp/ext/std/tools/collect.ini
  78. 386 0
      metrixpp/ext/std/tools/collect.py
  79. 1 1
      ext/std/tools/export.ini
  80. 9 9
      ext/std/tools/export.py
  81. 1 1
      ext/std/tools/info.ini
  82. 12 12
      ext/std/tools/info.py
  83. 0 0
      metrixpp/ext/std/tools/limit.ini
  84. 11 13
      ext/std/tools/limit.py
  85. 1 1
      ext/std/tools/limit_backend.ini
  86. 17 17
      ext/std/tools/limit_backend.py
  87. 1 1
      ext/std/tools/report.ini
  88. 24 26
      ext/std/tools/report.py
  89. 1 1
      ext/std/tools/view.ini
  90. 70 27
      ext/std/tools/view.py
  91. 7 18
      metrixpp.py
  92. 0 0
      metrixpp/mpp/__init__.py
  93. 45 17
      mpp/api.py
  94. 0 0
      metrixpp/mpp/cout.py
  95. 1 1
      mpp/dbf.ini
  96. 5 5
      mpp/dbf.py
  97. 0 0
      metrixpp/mpp/internal/__init__.py
  98. 3 2
      mpp/internal/api_impl.py
  99. 0 0
      metrixpp/mpp/internal/dbwrap.py
  100. 0 0
      mpp/internal/loader.py

+ 49 - 0
.github/workflows/gh-pages.yml

@@ -0,0 +1,49 @@
+# .github/workflows/gh-pages.yml
+
+name: github pages
+
+on:
+  push:
+    branches:
+      - master
+    paths:
+      - '.github/workflows/gh-pages.yml'
+      - 'docs/**'
+
+jobs:
+  deploy:
+    runs-on: ubuntu-18.04
+    defaults:
+      run:
+        working-directory: docs
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Setup Node
+        uses: actions/setup-node@v2.1.2
+        with:
+          node-version: '12.x'
+
+      # - name: Get yarn cache
+      #   id: yarn-cache
+      #   run: echo "::set-output name=dir::$(yarn cache dir)"
+
+      # - name: Cache dependencies
+      #   uses: actions/cache@v2
+      #   with:
+      #     path: ${{ steps.yarn-cache.outputs.dir }}
+      #     key: ${{ runner.os }}-website-${{ hashFiles('**/yarn.lock') }}
+      #     restore-keys: |
+      #       ${{ runner.os }}-website-
+
+      # - run: yarn install --frozen-lockfile
+      # - run: yarn build
+
+      - run: npm install
+      - run: npm run build
+
+      - name: Deploy
+        uses: peaceiris/actions-gh-pages@v3
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          publish_dir: ./docs/build

+ 31 - 0
.github/workflows/python-package.yml

@@ -0,0 +1,31 @@
+# This workflow will upload a Python Package using Twine when a release is created
+# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
+
+name: Upload Python Package
+
+on:
+  release:
+    types: [created]
+
+jobs:
+  deploy:
+
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Set up Python
+      uses: actions/setup-python@v2
+      with:
+        python-version: '3.x'
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip
+        pip install setuptools wheel twine
+    - name: Build and publish
+      env:
+        TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
+        TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
+      run: |
+        python setup.py sdist bdist_wheel
+        twine upload dist/*

+ 32 - 0
.github/workflows/python-tests.yml

@@ -0,0 +1,32 @@
+# This workflow will install Python dependencies and run tests with Python 2 and 3 on Windows, macOS and Ubuntu
+# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
+
+name: Metrix++ Tests
+
+on:
+  push:
+  pull_request:
+
+jobs:
+  test:
+    name: Test on ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    
+    strategy:
+      matrix:
+        python: [2.7, 3.5, 3.6, 3.7, 3.8]
+        os: [ubuntu-latest, windows-latest, macOS-latest]
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Set up Python ${{ matrix.python }}
+      uses: actions/setup-python@v2
+      with:
+        python-version: ${{ matrix.python }}
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip
+        pip install -r requirements.txt
+    - name: Run tests
+      run: |
+        python metrix++.py test

+ 1 - 0
.gitignore

@@ -6,3 +6,4 @@ tests/**/*.real.txt
 tests/**/*.diff.html
 .idea
 .vscode
+metrixpp/tests/**/*.db

+ 18 - 0
CHANGELOG.md

@@ -1,3 +1,21 @@
+## 1.7.1 (June, 2021)
+- improve C++ numbers parsing
+
+## 1.7 (December, 2020)
+- added Prometheus format for exporting/view
+- added std.code.longlines plugin
+
+## 1.6 (June, 2020)
+- added python3 support
+- added pypi package (metrixpp)
+- changed implementation of collect --include-files to include all files matching any rule
+- fixed implementation of std.code.maintindex.simple
+
+## 1.5 (April, 2019)
+- project moved to github
+- fixed processing of more than one directory #73
+- improved limit tool to be able to apply different limits on different region types
+
 ## 1.4 (June, 2014)
 - Fixed match of names of generic functions and classes 
 - New metrics group: std.code.member.* - counting of classes, fields, globals, etc.

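The 1.7 entry above introduces the std.code.longlines plugin. Conceptually the metric reduces to counting lines whose length exceeds a configured limit; a rough standalone illustration follows (a sketch only, not the plugin code that lives in metrixpp/ext/std/code/longlines.py, and the 80-character limit is an assumed default):

```python
# Rough illustration of a "long lines" count, independent of Metrix++ internals.
# The real std.code.longlines plugin integrates with the Metrix++ plugin API instead.
def count_long_lines(text, limit=80):
    """Return the number of lines longer than `limit` characters."""
    return sum(1 for line in text.splitlines() if len(line) > limit)

if __name__ == "__main__":
    sample = "short line\n" + "x" * 120 + "\nanother short line\n"
    print(count_long_lines(sample, limit=80))  # -> 1
```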
+ 7 - 0
Dockerfile

@@ -0,0 +1,7 @@
+FROM python:alpine
+
+RUN pip install --no-cache-dir metrixpp
+
+ENTRYPOINT [ "metrix++" ]
+
+CMD [ "metrix++" ]

+ 1 - 1
LICENSE

@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2009-2019 Metrix++ Team
+Copyright (c) 2009-2020 Metrix++ Team
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

+ 13 - 2
README.md

@@ -1,12 +1,23 @@
 # [Metrix++](https://github.com/metrixplusplus/metrixplusplus)
+
+![Metrix++ Tests](https://github.com/metrixplusplus/metrixplusplus/workflows/Metrix++%20Tests/badge.svg)
+![Upload Python Package](https://github.com/metrixplusplus/metrixplusplus/workflows/Upload%20Python%20Package/badge.svg)
+
 Metrix++ is an extendable tool for code metrics collection and analysis.
 Check projects documentation for additional information:
 * [https://metrixplusplus.github.io/](https://metrixplusplus.github.io/)
 
 Thank you for using the tool!
 
-## Download & Install
-In order to get the tool working, [download the archive](https://github.com/metrixplusplus/metrixplusplus/releases)
+## Installation
+
+Metrix++ is published on [PyPi](https://pypi.org/project/metrixpp/) as `metrixpp` and can be installed using pip:
+
+```
+pip install metrixpp
+```
+
+Alternatively you can [download the archive](https://github.com/metrixplusplus/metrixplusplus/releases)
 with the latest stable version and unpack it to some folder.
 The first run of the tool will trigger the installation within the folder, where it was launched.
 

+ 20 - 0
docs/.gitignore

@@ -0,0 +1,20 @@
+# Dependencies
+/node_modules
+
+# Production
+/build
+
+# Generated files
+.docusaurus
+.cache-loader
+
+# Misc
+.DS_Store
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*

+ 33 - 0
docs/README.md

@@ -0,0 +1,33 @@
+# Website
+
+This website is built using [Docusaurus 2](https://v2.docusaurus.io/).
+
+### Installation
+
+```
+$ yarn
+```
+
+### Local Development
+
+```
+$ yarn start
+```
+
+This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
+
+### Build
+
+```
+$ yarn build
+```
+
+This command generates static content into the `build` directory and can be served using any static contents hosting service.
+
+### Deployment
+
+```
+$ GIT_USER=<Your GitHub username> USE_SSH=true yarn deploy
+```
+
+If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.

+ 3 - 0
docs/babel.config.js

@@ -0,0 +1,3 @@
+module.exports = {
+  presets: [require.resolve('@docusaurus/core/lib/babel/preset')],
+};

+ 7 - 0
docs/docs/01-d-file.md

@@ -0,0 +1,7 @@
+---
+id: 01-d-file
+title: Overview
+sidebar_label: For developers
+---
+
+For developers

File diff suppressed because it is too large
+ 169 - 0
docs/docs/01-u-overview.md


+ 21 - 0
docs/docs/02-u-download-install.md

@@ -0,0 +1,21 @@
+---
+id: 02-u-download-install
+title: Download and install
+sidebar_label: Download and install
+---
+
+In order to get the tool working, [download the archive](https://github.com/metrixplusplus/metrixplusplus/releases) with the latest stable version and unpack it to some folder. The first run of the tool will trigger the installation within the folder, where it was launched.
+
+In order to check out the latest development version from the [version control system](https://github.com/metrixplusplus/metrixplusplus), use this command:
+```sh
+> git clone https://github.com/metrixplusplus/metrixplusplus metrixplusplus
+```
+
+#### Change and Release Notes
+Change and release notes are available [here](https://github.com/metrixplusplus/metrixplusplus/blob/master/CHANGELOG.md).
+
+#### Prerequisites
+Python Runtime Environment (version 2.7.x or 3.x)
+
+#### License
+[MIT license](https://github.com/metrixplusplus/metrixplusplus/blob/master/LICENSE)

+ 26 - 0
docs/docs/03-u-getting-started.md

@@ -0,0 +1,26 @@
+---
+id: 03-u-getting-started
+title: Getting started
+sidebar_label: Getting started
+---
+
+The tool is relatively simple to use. There are 3 fundamental steps:
+
+* Collect the data, for example:
+```bash
+> python "/path/to/metrix++.py" collect --std.code.lines.code --std.code.complexity.cyclomatic
+```
+
+
+* View the data, for example:
+```bash
+> python "/path/to/metrix++.py" view
+```
+
+
+* Apply thresholds, for example:
+```bash
+> python "/path/to/metrix++.py" limit --max-limit=std.code.complexity:cyclomatic:7
+```
+
+Please, check the advanced [description of the workflow](04-u-workflow.md) with real examples.

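For CI-style automation, the three steps documented above can be chained from a small script. A minimal sketch, assuming metrix++.py sits at the indicated placeholder path and the sources live in the current directory (the metric and limit options are the same ones used in the examples above):

```python
# Minimal sketch: run the collect / view / limit steps described above in sequence.
# METRIXPP is a placeholder path; adjust it and the options to your setup.
import subprocess
import sys

METRIXPP = "/path/to/metrix++.py"

steps = [
    [sys.executable, METRIXPP, "collect", "--std.code.lines.code", "--std.code.complexity.cyclomatic"],
    [sys.executable, METRIXPP, "view"],
    [sys.executable, METRIXPP, "limit", "--max-limit=std.code.complexity:cyclomatic:7"],
]

for cmd in steps:
    print(">", " ".join(cmd))
    result = subprocess.run(cmd)
    if result.returncode != 0:
        sys.exit(result.returncode)  # stop on the first failing step
```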
File diff suppressed because it is too large
+ 506 - 0
docs/docs/04-u-workflow.md


File diff suppressed because it is too large
+ 642 - 0
docs/docs/05-u-extending-tool.md


+ 16 - 0
docs/docs/06-u-feeback-contribute.md

@@ -0,0 +1,16 @@
+---
+id: 06-u-feeback-contribute
+title: Feedback and contribute
+sidebar_label: Feedback and contribute
+---
+Now it is your turn. There are multiple ways you can contribute and help to improve the Metrix++ project:
+
+* Try Metrix++ and [post a review](https://github.com/metrixplusplus/metrixplusplus/issues/new)
+* [Submit a new feature request or bug report](https://github.com/metrixplusplus/metrixplusplus/issues/new)
+* [Ask a question](https://github.com/metrixplusplus/metrixplusplus/issues/new)
+* Submit a git pull request
+* Create and publish your plugin. [Request to refer](https://github.com/metrixplusplus/metrixplusplus/issues/new) to it from the Metrix++ project space.
+* [Submit your plugin](https://github.com/metrixplusplus/metrixplusplus/issues/new) to be included in the standard set
+* ... and consider 
+<a href="mailto:avkonst@gmail.com?subject=Metrix%2B%2B Join Request" target="blank">joining the project</a>!

+ 107 - 0
docs/docusaurus.config.js

@@ -0,0 +1,107 @@
+module.exports = {
+  title: 'Metrix++',
+  tagline: 'Management of source code quality is possible',
+  url: 'https://metrixplusplus.github.io/',
+  baseUrl: '/metrixplusplus/',
+  favicon: 'img/favicon.png',
+  projectName: 'metrixplusplus', // Usually your repo name.
+  themeConfig: {
+    navbar: {
+      title: 'Metrix++',
+      logo: {
+        alt: 'Metrix++',
+        src: 'img/favicon.png',
+      },
+      items: [
+        {
+          to: 'docs/01-u-overview',
+          activeBasePath: 'docs',
+          label: 'Docs',
+          position: 'left',
+        },
+        {
+          href: 'https://github.com/metrixplusplus/metrixplusplus',
+          label: 'GitHub',
+          position: 'right',
+        },
+      ],
+    },
+    footer: {
+      style: 'dark',
+      links: [
+        {
+          title: 'Docs',
+          items: [
+            {
+              label: 'Users Manual',
+              to: 'docs/01-u-overview',
+            },
+            {
+              label: 'Developers Manual',
+              to: 'docs/01-d-file',
+            },
+          ],
+        },
+        {
+          title: 'Community',
+          items: [
+            {
+              label: 'GitHub',
+              href: 'https://github.com/metrixplusplus/',
+            },
+            {
+              label: 'Open issues',
+              href: 'https://github.com/metrixplusplus/metrixplusplus/issues',
+            },
+            {
+              label: 'Changelog',
+              href: 'https://github.com/metrixplusplus/metrixplusplus/blob/master/CHANGELOG.md',
+            },
+          ],
+        },
+        {
+          title: 'Feedback',
+          items: [
+            {
+              label: 'Ask question',
+              href: 'https://github.com/metrixplusplus/metrixplusplus/issues/new',
+            },
+            {
+              label: 'Report defect',
+              href: 'https://github.com/metrixplusplus/metrixplusplus/issues/new',
+            },
+            {
+              label: 'Feature request',
+              href: 'https://github.com/metrixplusplus/metrixplusplus/issues/new',
+            },
+          ],
+        },
+      ],
+      copyright: `Copyright © 2009 - ${new Date().getFullYear()}, Metrix++ Project.`,
+    },
+  },
+  presets: [
+    [
+      '@docusaurus/preset-classic',
+      {
+        docs: {
+          // It is recommended to set document id as docs home page (`docs/` path).
+          homePageId: 'highlights',
+          sidebarPath: require.resolve('./sidebars.js'),
+          // Please change this to your repo.
+          editUrl:
+            'https://metrixplusplus.github.io/',
+        },
+        blog: {
+          showReadingTime: true,
+          // Please change this to your repo.
+          editUrl:
+            'https://metrixplusplus.github.io/',
+        },
+        theme: {
+          customCss: require.resolve('./src/css/custom.css'),
+        },
+      },
+    ],
+  ],
+};

File diff suppressed because it is too large
+ 13177 - 0
docs/package-lock.json


+ 30 - 0
docs/package.json

@@ -0,0 +1,30 @@
+{
+  "name": "docs",
+  "version": "0.0.0",
+  "private": true,
+  "scripts": {
+    "start": "docusaurus start",
+    "build": "docusaurus build",
+    "swizzle": "docusaurus swizzle",
+    "deploy": "docusaurus deploy"
+  },
+  "dependencies": {
+    "@docusaurus/core": "^2.0.0-alpha.58",
+    "@docusaurus/preset-classic": "^2.0.0-alpha.58",
+    "clsx": "^1.1.1",
+    "react": "^16.8.4",
+    "react-dom": "^16.8.4"
+  },
+  "browserslist": {
+    "production": [
+      ">0.2%",
+      "not dead",
+      "not op_mini all"
+    ],
+    "development": [
+      "last 1 chrome version",
+      "last 1 firefox version",
+      "last 1 safari version"
+    ]
+  }
+}

+ 15 - 0
docs/sidebars.js

@@ -0,0 +1,15 @@
+module.exports = {
+  someSidebar: {
+    'Users Manual': [
+        '01-u-overview',
+        '02-u-download-install',
+        '03-u-getting-started',
+        '04-u-workflow',
+        '05-u-extending-tool',
+        '06-u-feeback-contribute'
+    ],
+    'Developers Manual': [
+        '01-d-file'
+    ],
+  },
+};

+ 35 - 0
docs/src/css/custom.css

@@ -0,0 +1,35 @@
+/* stylelint-disable docusaurus/copyright-header */
+/**
+ * Any CSS included here will be global. The classic template
+ * bundles Infima by default. Infima is a CSS framework designed to
+ * work well for content-centric websites.
+ */
+
+/* You can override the default Infima variables here. */
+:root {
+  --ifm-color-primary: #25c2a0;
+  --ifm-color-primary-dark: rgb(33, 175, 144);
+  --ifm-color-primary-darker: rgb(31, 165, 136);
+  --ifm-color-primary-darkest: rgb(26, 136, 112);
+  --ifm-color-primary-light: rgb(70, 203, 174);
+  --ifm-color-primary-lighter: rgb(102, 212, 189);
+  --ifm-color-primary-lightest: rgb(146, 224, 208);
+  --ifm-code-font-size: 95%;
+}
+
+.docusaurus-highlight-code-line {
+  background-color: rgb(72, 77, 91);
+  display: block;
+  margin: 0 calc(-1 * var(--ifm-pre-padding));
+  padding: 0 var(--ifm-pre-padding);
+}
+
+.info{
+  background-color: #d9edf7;
+}
+.td-regular{
+  background-color: #FFFFFF !important;
+}
+.center-justified {
+  text-align: justify;
+}

BIN
docs/src/img/piechart.png


+ 127 - 0
docs/src/pages/index.js

@@ -0,0 +1,127 @@
+import React from 'react';
+import clsx from 'clsx';
+import Layout from '@theme/Layout';
+import Link from '@docusaurus/Link';
+import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
+import useBaseUrl from '@docusaurus/useBaseUrl';
+import styles from './styles.module.css';
+
+const features = [
+  {
+    title: <>Configurable</>,
+    imageUrl: 'img/perm_data_setting-24px.svg',
+    description: (
+      <>
+        Define and apply your rules and policies.
+        Integrate with your workflow.
+      </>
+    ),
+  },
+  {
+    title: <>Extendable via plugins</>,
+    imageUrl: 'img/extension-24px.svg',
+    description: (
+      <>
+        Define your custom metric.
+        Add new language parser.
+        Create advanced post-analysis tool.
+      </>
+    ),
+  },
+  {
+    title: <>Multiple metrics</>,
+    imageUrl: 'img/analytics-24px.svg',
+    description: (
+      <>
+        Complexity, size and other.
+      </>
+    ),
+  },
+  {
+    title: <>Multiple languages</>,
+    imageUrl: 'img/done_all-24px.svg',
+    description: (
+      <>
+        C/C++, C# and Java.
+        Recognizes classes, interfaces, namespaces, functions, comments, preprocessor and much more.
+      </>
+    ),
+  },
+  {
+    title: <>High performance and scalability</>,
+    imageUrl: 'img/speed-24px.svg',
+    description: (
+      <>
+        Applicable to huge code bases: thousands of files per minute.
+        Ultra-fast feedback on iterative re-run.
+      </>
+    ),
+  },
+  {
+    title: <>Effortless application to legacy code</>,
+    imageUrl: 'img/bolt-24px.svg',
+    description: (
+      <>
+        Recognizes legacy, modified and new code.
+        Prevents negative trends. Encourages positive ones.
+      </>
+    ),
+  },
+];
+
+function Feature({imageUrl, title, description}) {
+  const imgUrl = useBaseUrl(imageUrl);
+  return (
+    <div className={clsx('col col--4', styles.feature)}>
+      {imgUrl && (
+        <div className="text--center" style={{color:'green', fill: 'green'}}>
+          <img className={styles.featureImage} style={{fill:'green'}} src={imgUrl} alt={title} />
+        </div>
+      )}
+      <h3>{title}</h3>
+      <p>{description}</p>
+    </div>
+  );
+}
+
+function Home() {
+  const context = useDocusaurusContext();
+  const {siteConfig = {}} = context;
+  return (
+    <Layout
+      title={`${siteConfig.title}`}
+      description="Management of source code quality is possible">
+      <header className={clsx('hero hero--primary', styles.heroBanner)}>
+        <div className="container">
+          <h1 className="hero__title">{siteConfig.title}</h1>
+          <p className="hero__subtitle">{siteConfig.tagline}</p>
+          <div className={styles.buttons}>
+            <Link
+              className={clsx(
+                'button button--outline button--secondary button--lg',
+                styles.getStarted,
+              )}
+              to={useBaseUrl('docs/01-u-overview')}>
+              Get Started
+            </Link>
+          </div>
+        </div>
+      </header>
+      <main>
+        {features && features.length > 0 && (
+          <section className={styles.features}>
+            <div className="container">
+              <div className="row">
+                {features.map((props, idx) => (
+                  <Feature key={idx} {...props} />
+                ))}
+              </div>
+            </div>
+          </section>
+        )}
+      </main>
+    </Layout>
+  );
+}
+
+export default Home;

+ 37 - 0
docs/src/pages/styles.module.css

@@ -0,0 +1,37 @@
+/* stylelint-disable docusaurus/copyright-header */
+
+/**
+ * CSS files with the .module.css suffix will be treated as CSS modules
+ * and scoped locally.
+ */
+
+.heroBanner {
+  padding: 4rem 0;
+  text-align: center;
+  position: relative;
+  overflow: hidden;
+}
+
+@media screen and (max-width: 966px) {
+  .heroBanner {
+    padding: 2rem;
+  }
+}
+
+.buttons {
+  display: flex;
+  align-items: center;
+  justify-content: center;
+}
+
+.features {
+  display: flex;
+  align-items: center;
+  padding: 2rem 0;
+  width: 100%;
+}
+
+.featureImage {
+  height: 200px;
+  width: 200px;
+}

tests/general/test_basic/sources/.unused.cpp → docs/static/.nojekyll


+ 1 - 0
docs/static/img/analytics-24px.svg

@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" height="24" viewBox="0 0 24 24" width="24"><g><rect fill="none" height="24" width="24"/><g><path d="M19,3H5C3.9,3,3,3.9,3,5v14c0,1.1,0.9,2,2,2h14c1.1,0,2-0.9,2-2V5C21,3.9,20.1,3,19,3z M19,19H5V5h14V19z"/><rect height="5" width="2" x="7" y="12"/><rect height="10" width="2" x="15" y="7"/><rect height="3" width="2" x="11" y="14"/><rect height="2" width="2" x="11" y="10"/></g></g></svg>

+ 1 - 0
docs/static/img/bolt-24px.svg

@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" height="24" viewBox="0 0 24 24" width="24"><g><rect fill="none" height="24" width="24"/></g><g><path d="M11,21h-1l1-7H7.5c-0.88,0-0.33-0.75-0.31-0.78C8.48,10.94,10.42,7.54,13.01,3h1l-1,7h3.51c0.4,0,0.62,0.19,0.4,0.66 C12.97,17.55,11,21,11,21z"/></g></svg>

+ 1 - 0
docs/static/img/done_all-24px.svg

@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0V0z" fill="none"/><path d="M18 7l-1.41-1.41-6.34 6.34 1.41 1.41L18 7zm4.24-1.41L11.66 16.17 7.48 12l-1.41 1.41L11.66 19l12-12-1.42-1.41zM.41 13.41L6 19l1.41-1.41L1.83 12 .41 13.41z"/></svg>

File diff suppressed because it is too large
+ 1 - 0
docs/static/img/extension-24px.svg


BIN
docs/static/img/favicon.png


File diff suppressed because it is too large
+ 1 - 0
docs/static/img/perm_data_setting-24px.svg


+ 1 - 0
docs/static/img/speed-24px.svg

@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0z" fill="none"/><path d="M20.38 8.57l-1.23 1.85a8 8 0 0 1-.22 7.58H5.07A8 8 0 0 1 15.58 6.85l1.85-1.23A10 10 0 0 0 3.35 19a2 2 0 0 0 1.72 1h13.85a2 2 0 0 0 1.74-1 10 10 0 0 0-.27-10.44z"/><path d="M10.59 15.41a2 2 0 0 0 2.83 0l5.66-8.49-8.49 5.66a2 2 0 0 0 0 2.83z"/></svg>

File diff suppressed because it is too large
+ 170 - 0
docs/static/img/undraw_docusaurus_mountain.svg


File diff suppressed because it is too large
+ 169 - 0
docs/static/img/undraw_docusaurus_react.svg


File diff suppressed because it is too large
+ 1 - 0
docs/static/img/undraw_docusaurus_tree.svg


+ 0 - 189
ext/std/tools/collect.py

@@ -1,189 +0,0 @@
-#
-#    Metrix++, Copyright 2009-2019, Metrix++ Project
-#    Link: https://github.com/metrixplusplus/metrixplusplus
-#    
-#    This file is a part of Metrix++ Tool.
-#    
-
-
-import mpp.api
-
-import re
-import os
-import logging
-import time
-import binascii
-import fnmatch
-import multiprocessing.pool
-
-class Plugin(mpp.api.Plugin, mpp.api.Parent, mpp.api.IConfigurable, mpp.api.IRunable):
-    
-    def __init__(self):
-        self.reader = DirectoryReader()
-        self.include_rules = []
-        self.exclude_rules = []
-        self.exclude_files = []
-        self.parsers       = []
-        super(Plugin, self).__init__()
-
-    def declare_configuration(self, parser):
-        parser.add_option("--std.general.proctime", "--sgpt", action="store_true", default=False,
-                         help="If the option is set (True), the tool measures processing time per file [default: %default]")
-        parser.add_option("--std.general.procerrors", "--sgpe", action="store_true", default=False,
-                         help="If the option is set (True), the tool counts number of processing/parsing errors per file [default: %default]")
-        parser.add_option("--std.general.size", "--sgs", action="store_true", default=False,
-                         help="If the option is set (True), the tool collects file size metric (in bytes) [default: %default]")
-        parser.add_option("--include-files", "--if", default=r'.*',
-                         help="Adds a regular expression pattern to include files in processing (files have to match any rule to be included) [default: %default]")
-        parser.add_option("--exclude-files", "--ef", default=r'^[.]',
-                         help="Adds a regular expression pattern to exclude files or directories from processing [default: %default]")
-        parser.add_option("--non-recursively", "--nr", action="store_true", default=False,
-                         help="If the option is set (True), sub-directories are not processed [default: %default]")
-        self.optparser = parser
-    
-    def configure(self, options):
-        self.is_proctime_enabled = options.__dict__['std.general.proctime']
-        self.is_procerrors_enabled = options.__dict__['std.general.procerrors']
-        self.is_size_enabled = options.__dict__['std.general.size']
-        try:
-            self.add_include_rule(re.compile(options.__dict__['include_files']))
-        except Exception as e:
-            self.optparser.error("option --include-files: " + str(e))
-        try:
-            self.add_exclude_rule(re.compile(options.__dict__['exclude_files']))
-        except Exception as e:
-            self.optparser.error("option --exclude-files: " + str(e))
-        self.non_recursively = options.__dict__['non_recursively']
-
-    def initialize(self):
-        fields = []
-        if self.is_proctime_enabled == True:
-            fields.append(self.Field('proctime', float))
-        if self.is_procerrors_enabled == True:
-            fields.append(self.Field('procerrors', int))
-        if self.is_size_enabled == True:
-            fields.append(self.Field('size', int))
-        super(Plugin, self).initialize(namespace='std.general', support_regions=False, fields=fields)
-        self.add_exclude_file(self.get_plugin('mpp.dbf').get_dbfile_path())
-        self.add_exclude_file(self.get_plugin('mpp.dbf').get_dbfile_prev_path())
-        
-    def run(self, args):
-        if len(args) == 0:
-            return self.reader.run(self, "./")
-        retcode = 0
-        for directory in args:
-            retcode += self.reader.run(self, directory)
-        return retcode
-        
-    def register_parser(self, fnmatch_exp_list, parser):
-        self.parsers.append((fnmatch_exp_list, parser))
-
-    def get_parser(self, file_path):
-        for parser in self.parsers:
-            for fnmatch_exp in parser[0]:
-                if fnmatch.fnmatch(file_path, fnmatch_exp):
-                    return parser[1]
-        return None
-
-    def add_include_rule(self, re_compiled_pattern):
-        self.include_rules.append(re_compiled_pattern)
-
-    def add_exclude_rule(self, re_compiled_pattern):
-        self.exclude_rules.append(re_compiled_pattern)
-
-    def add_exclude_file(self, file_path):
-        if file_path == None:
-            return
-        self.exclude_files.append(file_path)
-
-    def is_file_excluded(self, file_name):
-        # only apply the include rules to files - skip directories
-        if os.path.isfile(file_name):
-            for each in self.include_rules:
-                if re.match(each, os.path.basename(file_name)) != None:
-                    break;
-            # file is excluded if no include rule matches
-            else:
-                return True
-        # check exclude rules for both, files and directories
-        for each in self.exclude_rules:
-            if re.match(each, os.path.basename(file_name)) != None:
-                return True
-        # finally check if a file is excluded directly
-        for each in self.exclude_files:
-            if os.path.basename(each) == os.path.basename(file_name):
-                if os.stat(each) == os.stat(file_name):
-                    return True
-        return False 
-        
-class DirectoryReader():
-    
-    def run(self, plugin, directory):
-        
-        IS_TEST_MODE = False
-        if 'METRIXPLUSPLUS_TEST_MODE' in list(os.environ.keys()):
-            IS_TEST_MODE = True
-
-        def run_per_file(plugin, fname, full_path):
-            exit_code = 0
-            norm_path = re.sub(r'''[\\]''', "/", full_path)
-            if os.path.isabs(norm_path) == False and norm_path.startswith('./') == False:
-                norm_path = './' + norm_path
-            if plugin.is_file_excluded(norm_path) == False:
-                if os.path.isdir(full_path):
-                    if plugin.non_recursively == False:
-                        exit_code += run_recursively(plugin, full_path)
-                else:
-                    parser = plugin.get_parser(full_path)
-                    if parser == None:
-                        logging.info("Skipping: " + norm_path)
-                    else:
-                        logging.info("Processing: " + norm_path)
-                        ts = time.time()
-                        f = open(full_path, 'rU');
-                        text = f.read();
-                        f.close()
-                        checksum = binascii.crc32(text.encode('utf8')) & 0xffffffff # to match python 3
-                        
-                        db_loader = plugin.get_plugin('mpp.dbf').get_loader()
-                        (data, is_updated) = db_loader.create_file_data(norm_path, checksum, str(text))
-                        procerrors = parser.process(plugin, data, is_updated)
-                        if plugin.is_proctime_enabled == True:
-                            data.set_data('std.general', 'proctime',
-                                          (time.time() - ts) if IS_TEST_MODE == False else 0.01)
-                        if plugin.is_procerrors_enabled == True and procerrors != None and procerrors != 0:
-                            data.set_data('std.general', 'procerrors', procerrors)
-                        if plugin.is_size_enabled == True:
-                            data.set_data('std.general', 'size', len(text))
-                        db_loader.save_file_data(data)
-                        #logging.debug("-" * 60)
-                        exit_code += procerrors
-            else:
-                logging.info("Excluding: " + norm_path)
-            return exit_code
-        
-
-        #thread_pool = multiprocessing.pool.ThreadPool()
-        #def mp_worker(args):
-        #    run_per_file(args[0], args[1], args[2])
-        def run_recursively(plugin, directory):
-            exit_code = 0
-            #thread_pool.map(mp_worker,
-            #    [(plugin, f, os.path.join(subdir, f))
-            #        for subdir, dirs, files in os.walk(directory) for f in files])
-            for fname in sorted(os.listdir(directory)):
-                full_path = os.path.join(directory, fname)
-                exit_code += run_per_file(plugin, fname, full_path)
-            
-            return exit_code
-        
-        if os.path.exists(directory) == False:
-            logging.error("Skipping (does not exist): " + directory)
-            return 1
-        
-        if os.path.isdir(directory):
-            total_errors = run_recursively(plugin, directory)
-        else:
-            total_errors = run_per_file(plugin, os.path.basename(directory), directory)
-        total_errors = total_errors # used, warnings are per file if not zero
-        return 0 # ignore errors, collection is successful anyway

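The file above is removed because the collect plugin moves under metrixpp/ext/std/tools/collect.py (entry 78 in the file list, suppressed as too large). Its is_file_excluded logic is what the 1.6 changelog entry refers to ("--include-files ... include all files matching any rule"); a simplified standalone sketch of that decision, covering only basename regex rules and not the database-file exclusions, could look like this:

```python
# Simplified sketch of the include/exclude decision from the old collect.py:
# a file is kept only if it matches at least one --include-files pattern
# and matches no --exclude-files pattern; directories skip the include check.
import os
import re

def is_excluded(path, include_rules, exclude_rules):
    name = os.path.basename(path)
    if not os.path.isdir(path):
        if not any(rule.match(name) for rule in include_rules):
            return True  # excluded: no include rule matched
    return any(rule.match(name) for rule in exclude_rules)

if __name__ == "__main__":
    include = [re.compile(r'.*[.](c|cpp|h)$')]  # illustrative pattern (default is '.*')
    exclude = [re.compile(r'^[.]')]             # default pattern: hidden files/dirs
    for candidate in ["main.cpp", "notes.txt", ".hidden.c"]:
        print(candidate, "excluded" if is_excluded(candidate, include, exclude) else "kept")
```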
+ 2 - 2
metrix++.py

@@ -1,10 +1,10 @@
 #
-#    Metrix++, Copyright 2009-2019, Metrix++ Project
+#    Metrix++, Copyright 2009-2020, Metrix++ Project
 #    Link: https://github.com/metrixplusplus/metrixplusplus
 #    
 #    This file is a part of Metrix++ Tool.
 #    
 
 if __name__ == '__main__':
-    import metrixpp
+    from metrixpp import metrixpp
     metrixpp.start()

tests/general/test_basic/sources/dummy.txt → metrixpp/__init__.py


tests/general/test_basic/sources_changed/.unused.cpp → metrixpp/ext/__init__.py


ext/std/__init__.py → metrixpp/ext/std/__init__.py


ext/std/code/__init__.py → metrixpp/ext/std/code/__init__.py


ext/std/code/complexity.ini → metrixpp/ext/std/code/complexity.ini


+ 7 - 7
ext/std/code/complexity.py

@@ -5,11 +5,11 @@
 #    This file is a part of Metrix++ Tool.
 #    
 
-import mpp.api
+from metrixpp.mpp import api
 
 import re
 
-class Plugin(mpp.api.Plugin, mpp.api.MetricPluginMixin, mpp.api.Child, mpp.api.IConfigurable):
+class Plugin(api.Plugin, api.MetricPluginMixin, api.Child, api.IConfigurable):
     
     def declare_configuration(self, parser):
         parser.add_option("--std.code.complexity.cyclomatic", "--sccc", action="store_true", default=False,
@@ -40,8 +40,8 @@ class Plugin(mpp.api.Plugin, mpp.api.MetricPluginMixin, mpp.api.Child, mpp.api.I
                                 'std.code.cs': self.pattern_cs,
                                 'std.code.java': self.pattern_java
                             },
-                            marker_type_mask=mpp.api.Marker.T.CODE,
-                            region_type_mask=mpp.api.Region.T.FUNCTION)
+                            marker_type_mask=api.Marker.T.CODE,
+                            region_type_mask=api.Region.T.FUNCTION)
         self.declare_metric(self.is_active_maxindent,
                             self.Field('maxindent', int),
                             {
@@ -49,8 +49,8 @@ class Plugin(mpp.api.Plugin, mpp.api.MetricPluginMixin, mpp.api.Child, mpp.api.I
                                 'std.code.cs': (self.pattern_indent, self.MaxIndentCounter),
                                 'std.code.java': (self.pattern_indent, self.MaxIndentCounter),
                             },
-                            marker_type_mask=mpp.api.Marker.T.CODE,
-                            region_type_mask=mpp.api.Region.T.FUNCTION)
+                            marker_type_mask=api.Marker.T.CODE,
+                            region_type_mask=api.Region.T.FUNCTION)
         
         super(Plugin, self).initialize(fields=self.get_fields())
         
@@ -61,7 +61,7 @@ class Plugin(mpp.api.Plugin, mpp.api.MetricPluginMixin, mpp.api.Child, mpp.api.I
                 'std.code.java'
             ])
 
-    class MaxIndentCounter(mpp.api.MetricPluginMixin.IterAssignCounter):
+    class MaxIndentCounter(api.MetricPluginMixin.IterAssignCounter):
         
         def __init__(self, *args, **kwargs):
             super(Plugin.MaxIndentCounter, self).__init__(*args, **kwargs)

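The plugin above declares the cyclomatic metric per function region by counting matches of language-specific decision-point patterns (self.pattern_cs and self.pattern_java are visible in the hunk; the C/C++ counterpart is not shown here). Conceptually the value reduces to "decision points + 1"; a rough standalone illustration with a made-up keyword pattern, not the plugin's actual regex:

```python
# Conceptual sketch of cyclomatic complexity as "decision points + 1".
# The keyword pattern below is illustrative only; the real plugin uses
# per-language patterns registered for std.code.cpp / cs / java.
import re

DECISION_POINTS = re.compile(r'\b(if|for|while|case|catch)\b|&&|\|\|')

def cyclomatic(function_body):
    return len(DECISION_POINTS.findall(function_body)) + 1

if __name__ == "__main__":
    body = """
    if (a && b) { do_x(); }
    for (int i = 0; i < n; ++i) { if (i % 2) do_y(); }
    """
    print(cyclomatic(body))  # 3 keywords (if, for, if) + 1 operator (&&) + 1 = 5
```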
ext/std/code/cpp.ini → metrixpp/ext/std/code/cpp.ini


+ 20 - 20
ext/std/code/cpp.py

@@ -8,10 +8,10 @@
 import re
 import binascii
 
-import mpp.api
-import mpp.cout
+from metrixpp.mpp import api
+from metrixpp.mpp import cout
 
-class Plugin(mpp.api.Plugin, mpp.api.Parent, mpp.api.IParser, mpp.api.IConfigurable, mpp.api.ICode):
+class Plugin(api.Plugin, api.Parent, api.IParser, api.IConfigurable, api.ICode):
     
     def declare_configuration(self, parser):
         parser.add_option("--std.code.cpp.files", default="*.c,*.h,*.cpp,*.hpp,*.cc,*.hh,*.cxx,*.hxx",
@@ -22,7 +22,7 @@ class Plugin(mpp.api.Plugin, mpp.api.Parent, mpp.api.IParser, mpp.api.IConfigura
         self.files.sort() # sorted list goes to properties
         
     def initialize(self):
-        mpp.api.Plugin.initialize(self, properties=[
+        api.Plugin.initialize(self, properties=[
             self.Property('files', ','.join(self.files))
         ])
         self.get_plugin('std.tools.collect').register_parser(self.files, self)
@@ -51,7 +51,7 @@ class CppCodeParser(object):
                                                                       # Need to support new line separators in expense of efficiency?
                 | /\*\*/                                              # Match C style comments (empty comment line)
                 | /([\\](?:\n|\r\n|\r))*\*.*?\*([\\](?:\n|\r\n|\r))*/ # Match C style comments
-                | \'(?:\\.|[^\\\'])*\'                                # Match quoted strings
+                | (?<![0-9a-fA-F])\'(?:\\.|[^\\\'])*\'                                # Match quoted strings
                 | "(?:\\.|[^\\"])*"                                   # Match double quoted strings
                 | (((?<=\n|\r)|^)[ \t]*[#].*?[^\\](?=\n|\r\n|\r))     # Match preprocessor
                                                                       # NOTE: end of line is NOT consumed
@@ -123,17 +123,17 @@ class CppCodeParser(object):
         def add_regions_rec(self, data, blocks):
             def get_type_id(data, named_type):
                 if named_type == "function":
-                    return mpp.api.Region.T.FUNCTION
+                    return api.Region.T.FUNCTION
                 elif named_type == "class":
-                    return mpp.api.Region.T.CLASS
+                    return api.Region.T.CLASS
                 elif named_type == "struct":
-                    return mpp.api.Region.T.STRUCT
+                    return api.Region.T.STRUCT
                 elif named_type == "union":
-                    return mpp.api.Region.T.STRUCT
+                    return api.Region.T.STRUCT
                 elif named_type == "namespace":
-                    return mpp.api.Region.T.NAMESPACE
+                    return api.Region.T.NAMESPACE
                 elif named_type == "__global__":
-                    return mpp.api.Region.T.GLOBAL
+                    return api.Region.T.GLOBAL
                 else:
                     assert(False)
             for each in blocks:
@@ -164,15 +164,15 @@ class CppCodeParser(object):
         for m in re.finditer(self.regex_cpp, text):
             # Comment
             if text[m.start()] == '/':
-                data.add_marker(m.start(), m.end(), mpp.api.Marker.T.COMMENT)
+                data.add_marker(m.start(), m.end(), api.Marker.T.COMMENT)
             
             # String
             elif text[m.start()] == '"' or text[m.start()] == '\'':
-                data.add_marker(m.start() + 1, m.end() - 1, mpp.api.Marker.T.STRING)
+                data.add_marker(m.start() + 1, m.end() - 1, api.Marker.T.STRING)
             
             # Preprocessor (including internal comments)
             elif text[m.start()] == ' ' or text[m.start()] == '\t' or text[m.start()] == '#':
-                data.add_marker(m.start(), m.end(), mpp.api.Marker.T.PREPROCESSOR)
+                data.add_marker(m.start(), m.end(), api.Marker.T.PREPROCESSOR)
 
             # Statement end
             elif text[m.start()] == ';':
@@ -223,9 +223,9 @@ class CppCodeParser(object):
                 if blocks[curblk]['indent_start'] == indent_current:
                     next_block = reset_next_block(m.end())
                     if curblk == 0:
-                        mpp.cout.notify(data.get_path(),
+                        cout.notify(data.get_path(),
                                          cursor_current + len(self.regex_ln.findall(text, cursor_last_pos, m.start())),
-                                         mpp.cout.SEVERITY_WARNING,
+                                         cout.SEVERITY_WARNING,
                                          "Non-matching closing bracket '}' detected.")
                         count_mismatched_brackets += 1
                         continue
@@ -240,9 +240,9 @@ class CppCodeParser(object):
                 # shift indent left
                 indent_current -= 1
                 if indent_current < 0:
-                    mpp.cout.notify(data.get_path(),
+                    cout.notify(data.get_path(),
                                      cursor_current + len(self.regex_ln.findall(text, cursor_last_pos, m.start())),
-                                     mpp.cout.SEVERITY_WARNING,
+                                     cout.SEVERITY_WARNING,
                                      "Non-matching closing bracket '}' detected.")
                     count_mismatched_brackets += 1
                     indent_current = 0
@@ -284,9 +284,9 @@ class CppCodeParser(object):
 
         while indent_current > 0:
             # log all
-            mpp.cout.notify(data.get_path(),
+            cout.notify(data.get_path(),
                              cursor_current + len(self.regex_ln.findall(text, cursor_last_pos, len(text))),
-                             mpp.cout.SEVERITY_WARNING,
+                             cout.SEVERITY_WARNING,
                              "Non-matching opening bracket '{' detected.")
             count_mismatched_brackets += 1
             indent_current -= 1

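The only behavioural change in this file is the `(?<![0-9a-fA-F])` lookbehind added to the quoted-string branch of the parser regex, which lines up with the 1.7.1 changelog entry about C++ number parsing: an apostrophe used as a C++14 digit separator (e.g. 0xFF'FF) should no longer be mistaken for the start of a character literal. A small check of that behaviour in isolation, using only the string branch of the pattern outside the full parser:

```python
# Isolated check of the lookbehind added above: a digit-separator apostrophe
# inside a numeric literal must not start a "quoted string" match.
import re

OLD = re.compile(r"'(?:\\.|[^\\'])*'")
NEW = re.compile(r"(?<![0-9a-fA-F])'(?:\\.|[^\\'])*'")

code = "char c = 'x'; unsigned mask = 0xFF'FF'FFu;"

print(OLD.findall(code))  # ["'x'", "'FF'"]  -- the separator is misread as a string
print(NEW.findall(code))  # ["'x'"]          -- only the real character literal
```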
ext/std/code/cs.ini → metrixpp/ext/std/code/cs.ini


+ 19 - 19
ext/std/code/cs.py

@@ -8,10 +8,10 @@
 import re
 import binascii
 
-import mpp.api
-import mpp.cout
+from metrixpp.mpp import api
+from metrixpp.mpp import cout
 
-class Plugin(mpp.api.Plugin, mpp.api.Parent, mpp.api.IParser, mpp.api.IConfigurable, mpp.api.ICode):
+class Plugin(api.Plugin, api.Parent, api.IParser, api.IConfigurable, api.ICode):
     
     def declare_configuration(self, parser):
         parser.add_option("--std.code.cs.files", default="*.cs",
@@ -22,7 +22,7 @@ class Plugin(mpp.api.Plugin, mpp.api.Parent, mpp.api.IParser, mpp.api.IConfigura
         self.files.sort() # sorted list goes to properties
         
     def initialize(self):
-        mpp.api.Plugin.initialize(self, properties=[
+        api.Plugin.initialize(self, properties=[
             self.Property('files', ','.join(self.files))
         ])
         self.get_plugin('std.tools.collect').register_parser(self.files, self)
@@ -134,17 +134,17 @@ class CsCodeParser(object):
         def add_regions_rec(self, data, blocks):
             def get_type_id(data, named_type):
                 if named_type == "function":
-                    return mpp.api.Region.T.FUNCTION
+                    return api.Region.T.FUNCTION
                 elif named_type == "class":
-                    return mpp.api.Region.T.CLASS
+                    return api.Region.T.CLASS
                 elif named_type == "struct":
-                    return mpp.api.Region.T.STRUCT
+                    return api.Region.T.STRUCT
                 elif named_type == "namespace":
-                    return mpp.api.Region.T.NAMESPACE
+                    return api.Region.T.NAMESPACE
                 elif named_type == "interface":
-                    return mpp.api.Region.T.INTERFACE
+                    return api.Region.T.INTERFACE
                 elif named_type == "__global__":
-                    return mpp.api.Region.T.GLOBAL
+                    return api.Region.T.GLOBAL
                 else:
                     assert(False)
             for each in blocks:
@@ -175,15 +175,15 @@ class CsCodeParser(object):
         for m in re.finditer(self.regex_cpp, text):
             # Comment
             if text[m.start()] == '/':
-                data.add_marker(m.start(), m.end(), mpp.api.Marker.T.COMMENT)
+                data.add_marker(m.start(), m.end(), api.Marker.T.COMMENT)
             
             # String
             elif text[m.start()] == '"' or text[m.start()] == '\'':
-                data.add_marker(m.start() + 1, m.end() - 1, mpp.api.Marker.T.STRING)
+                data.add_marker(m.start() + 1, m.end() - 1, api.Marker.T.STRING)
             
             # Preprocessor (including internal comments)
             elif text[m.start()] == ' ' or text[m.start()] == '\t' or text[m.start()] == '#':
-                data.add_marker(m.start(), m.end(), mpp.api.Marker.T.PREPROCESSOR)
+                data.add_marker(m.start(), m.end(), api.Marker.T.PREPROCESSOR)
 
             # Statement end
             elif text[m.start()] == ';':
@@ -234,9 +234,9 @@ class CsCodeParser(object):
                 if blocks[curblk]['indent_start'] == indent_current:
                     next_block = reset_next_block(m.end())
                     if curblk == 0:
-                        mpp.cout.notify(data.get_path(),
+                        cout.notify(data.get_path(),
                                          cursor_current + len(self.regex_ln.findall(text, cursor_last_pos, m.start())),
-                                         mpp.cout.SEVERITY_WARNING,
+                                         cout.SEVERITY_WARNING,
                                          "Non-matching closing bracket '}' detected.")
                         count_mismatched_brackets += 1
                         continue
@@ -251,9 +251,9 @@ class CsCodeParser(object):
                 # shift indent left
                 indent_current -= 1
                 if indent_current < 0:
-                    mpp.cout.notify(data.get_path(),
+                    cout.notify(data.get_path(),
                                      cursor_current + len(self.regex_ln.findall(text, cursor_last_pos, m.start())),
-                                     mpp.cout.SEVERITY_WARNING,
+                                     cout.SEVERITY_WARNING,
                                      "Non-matching closing bracket '}' detected.")
                     count_mismatched_brackets += 1
                     indent_current = 0
@@ -296,9 +296,9 @@ class CsCodeParser(object):
 
         while indent_current > 0:
             # log all
-            mpp.cout.notify(data.get_path(),
+            cout.notify(data.get_path(),
                              cursor_current + len(self.regex_ln.findall(text, cursor_last_pos, len(text))),
-                             mpp.cout.SEVERITY_WARNING,
+                             cout.SEVERITY_WARNING,
                              "Non-matching opening bracket '{' detected.")
             count_mismatched_brackets += 1
             indent_current -= 1

+ 1 - 1
ext/std/code/debug.ini

@@ -10,6 +10,6 @@ version: 1.0
 package: std.tools
 module:  debug
 class:   Plugin
-depends: mpp.dbf
+depends: metrixpp.mpp.dbf
 actions: debug
 enabled: False

+ 12 - 12
ext/std/code/debug.py

@@ -8,10 +8,10 @@
 import logging
 import cgi
 
-import mpp.api
-import mpp.utils
+from metrixpp.mpp import api
+from metrixpp.mpp import utils
 
-class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
+class Plugin(api.Plugin, api.IConfigurable, api.IRunable):
 
     def declare_configuration(self, parser):
         parser.add_option("-m", "--mode", default='dumphtml', choices=['dumphtml'],
@@ -21,7 +21,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
         self.mode = options.__dict__['mode']
 
     def run(self, args):
-        loader = self.get_plugin('mpp.dbf').get_loader()
+        loader = self.get_plugin('metrixpp.mpp.dbf').get_loader()
 
         if self.mode == 'dumphtml':
             return dumphtml(args, loader)
@@ -32,11 +32,11 @@ def dumphtml(args, loader):
     result = ""
     result += '<html><body>'
     for path in args:
-        path = mpp.utils.preprocess_path(path)
+        path = utils.preprocess_path(path)
 
         data = loader.load_file_data(path)
         if data == None:
-            mpp.utils.report_bad_path(path)
+            utils.report_bad_path(path)
             exit_code += 1
             continue
 
@@ -52,15 +52,15 @@ def dumphtml(args, loader):
         # TODO fix highlightning of markers
 #        result += '<table><tr><td><pre>'
 #        last_pos = 0
-#        for marker in data.iterate_markers(filter_group= mpp.api.Marker.T.COMMENT |
-#                                           mpp.api.Marker.T.STRING |
-#                                           mpp.api.Marker.T.PREPROCESSOR):
+#        for marker in data.iterate_markers(filter_group= api.Marker.T.COMMENT |
+#                                           api.Marker.T.STRING |
+#                                           api.Marker.T.PREPROCESSOR):
 #            result += (cgi.escape(text[last_pos:marker.begin]))
-#            if marker.get_type() == mpp.api.Marker.T.STRING:
+#            if marker.get_type() == api.Marker.T.STRING:
 #                result += ('<span style="color:#0000FF">')
-#            elif marker.get_type() == mpp.api.Marker.T.COMMENT:
+#            elif marker.get_type() == api.Marker.T.COMMENT:
 #                result += ('<span style="color:#009900">')
-#            elif marker.get_type() == mpp.api.Marker.T.PREPROCESSOR:
+#            elif marker.get_type() == api.Marker.T.PREPROCESSOR:
 #                result += ('<span style="color:#990000">')
 #            else:
 #                assert False, "Uknown marker type"

ext/std/code/filelines.ini → metrixpp/ext/std/code/filelines.ini


+ 7 - 7
ext/std/code/filelines.py

@@ -5,10 +5,10 @@
 #    This file is a part of Metrix++ Tool.
 #    
 
-import mpp.api
+from metrixpp.mpp import api
 import re
 
-class Plugin(mpp.api.Plugin, mpp.api.MetricPluginMixin, mpp.api.Child, mpp.api.IConfigurable):
+class Plugin(api.Plugin, api.MetricPluginMixin, api.Child, api.IConfigurable):
 
     def declare_configuration(self, parser):
         parser.add_option("--std.code.filelines.code", "--scflc", action="store_true", default=False,
@@ -40,23 +40,23 @@ class Plugin(mpp.api.Plugin, mpp.api.MetricPluginMixin, mpp.api.Child, mpp.api.I
         self.declare_metric(self.is_active_code,
                        self.Field('code', int),
                        self.pattern_line,
-                       mpp.api.Marker.T.CODE | mpp.api.Marker.T.STRING,
+                       api.Marker.T.CODE | api.Marker.T.STRING,
                        merge_markers=True)
         self.declare_metric(self.is_active_preprocessor,
                        self.Field('preprocessor', int),
                        self.pattern_line,
-                       mpp.api.Marker.T.PREPROCESSOR)
+                       api.Marker.T.PREPROCESSOR)
         self.declare_metric(self.is_active_comments,
                        self.Field('comments', int),
                        self.pattern_line,
-                       mpp.api.Marker.T.COMMENT)
+                       api.Marker.T.COMMENT)
         self.declare_metric(self.is_active_total,
                        self.Field('total', int),
                        self.pattern_line,
-                       mpp.api.Marker.T.ANY,
+                       api.Marker.T.ANY,
                        merge_markers=True)
 
         super(Plugin, self).initialize(fields=self.get_fields(), support_regions=False)
 
         if self.is_active() == True:
-            self.subscribe_by_parents_interface(mpp.api.ICode)
+            self.subscribe_by_parents_interface(api.ICode)

ext/std/code/java.ini → metrixpp/ext/std/code/java.ini


+ 16 - 16
ext/std/code/java.py

@@ -8,10 +8,10 @@
 import re
 import binascii
 
-import mpp.api
-import mpp.cout
+from metrixpp.mpp import api
+from metrixpp.mpp import cout
 
-class Plugin(mpp.api.Plugin, mpp.api.Parent, mpp.api.IParser, mpp.api.IConfigurable, mpp.api.ICode):
+class Plugin(api.Plugin, api.Parent, api.IParser, api.IConfigurable, api.ICode):
 
     def declare_configuration(self, parser):
         parser.add_option("--std.code.java.files", default="*.java",
@@ -22,7 +22,7 @@ class Plugin(mpp.api.Plugin, mpp.api.Parent, mpp.api.IParser, mpp.api.IConfigura
         self.files.sort() # sorted list goes to properties
 
     def initialize(self):
-        mpp.api.Plugin.initialize(self, properties=[
+        api.Plugin.initialize(self, properties=[
             self.Property('files', ','.join(self.files))
         ])
         self.get_plugin('std.tools.collect').register_parser(self.files, self)
@@ -111,13 +111,13 @@ class JavaCodeParser(object):
         def add_regions_rec(self, data, blocks):
             def get_type_id(data, named_type):
                 if named_type == "function":
-                    return mpp.api.Region.T.FUNCTION
+                    return api.Region.T.FUNCTION
                 elif named_type == "class":
-                    return mpp.api.Region.T.CLASS
+                    return api.Region.T.CLASS
                 elif named_type == "interface":
-                    return mpp.api.Region.T.INTERFACE
+                    return api.Region.T.INTERFACE
                 elif named_type == "__global__":
-                    return mpp.api.Region.T.GLOBAL
+                    return api.Region.T.GLOBAL
                 else:
                     assert(False)
             for each in blocks:
@@ -148,11 +148,11 @@ class JavaCodeParser(object):
         for m in re.finditer(self.regex_cpp, text):
             # Comment
             if text[m.start()] == '/':
-                data.add_marker(m.start(), m.end(), mpp.api.Marker.T.COMMENT)
+                data.add_marker(m.start(), m.end(), api.Marker.T.COMMENT)
 
             # String
             elif text[m.start()] == '"' or text[m.start()] == '\'':
-                data.add_marker(m.start() + 1, m.end() - 1, mpp.api.Marker.T.STRING)
+                data.add_marker(m.start() + 1, m.end() - 1, api.Marker.T.STRING)
 
             # Statement end
             elif text[m.start()] == ';':
@@ -191,9 +191,9 @@ class JavaCodeParser(object):
                 if blocks[curblk]['indent_start'] == indent_current:
                     next_block = reset_next_block(m.end())
                     if curblk == 0:
-                        mpp.cout.notify(data.get_path(),
+                        cout.notify(data.get_path(),
                                          cursor_current + len(self.regex_ln.findall(text, cursor_last_pos, m.start())),
-                                         mpp.cout.SEVERITY_WARNING,
+                                         cout.SEVERITY_WARNING,
                                          "Non-matching closing bracket '}' detected.")
                         count_mismatched_brackets += 1
                         continue
@@ -208,9 +208,9 @@ class JavaCodeParser(object):
                 # shift indent left
                 indent_current -= 1
                 if indent_current < 0:
-                    mpp.cout.notify(data.get_path(),
+                    cout.notify(data.get_path(),
                                      cursor_current + len(self.regex_ln.findall(text, cursor_last_pos, m.start())),
-                                     mpp.cout.SEVERITY_WARNING,
+                                     cout.SEVERITY_WARNING,
                                      "Non-matching closing bracket '}' detected.")
                     count_mismatched_brackets += 1
                     indent_current = 0
@@ -250,9 +250,9 @@ class JavaCodeParser(object):

         while indent_current > 0:
             # log all
-            mpp.cout.notify(data.get_path(),
+            cout.notify(data.get_path(),
                              cursor_current + len(self.regex_ln.findall(text, cursor_last_pos, len(text))),
-                             mpp.cout.SEVERITY_WARNING,
+                             cout.SEVERITY_WARNING,
                              "Non-matching opening bracket '{' detected.")
             count_mismatched_brackets += 1
             indent_current -= 1

ext/std/code/length.ini → metrixpp/ext/std/code/length.ini


+ 4 - 4
ext/std/code/length.py

@@ -5,9 +5,9 @@
 #    This file is a part of Metrix++ Tool.
 #    
 
-import mpp.api
+from metrixpp.mpp import api
 
-class Plugin(mpp.api.Plugin, mpp.api.Child, mpp.api.IConfigurable):
+class Plugin(api.Plugin, api.Child, api.IConfigurable):
 
     def declare_configuration(self, parser):
         parser.add_option("--std.code.length.total", "--sclent", action="store_true", default=False,
@@ -20,10 +20,10 @@ class Plugin(mpp.api.Plugin, mpp.api.Child, mpp.api.IConfigurable):
         fields = []
         if self.is_active == True:
             fields.append(self.Field('total', int))
-        mpp.api.Plugin.initialize(self, fields=fields)
+        api.Plugin.initialize(self, fields=fields)
 
         if len(fields) != 0:
-            self.subscribe_by_parents_interface(mpp.api.ICode)
+            self.subscribe_by_parents_interface(api.ICode)
 
     def callback(self, parent, data, is_updated):
         is_updated = is_updated or self.is_updated

ext/std/code/lines.ini → metrixpp/ext/std/code/lines.ini


+ 7 - 7
ext/std/code/lines.py

@@ -5,10 +5,10 @@
 #    This file is a part of Metrix++ Tool.
 #    
 
-import mpp.api
+from metrixpp.mpp import api
 import re
 
-class Plugin(mpp.api.Plugin, mpp.api.MetricPluginMixin, mpp.api.Child, mpp.api.IConfigurable):
+class Plugin(api.Plugin, api.MetricPluginMixin, api.Child, api.IConfigurable):
 
     def declare_configuration(self, parser):
         parser.add_option("--std.code.lines.code", "--sclc", action="store_true", default=False,
@@ -40,23 +40,23 @@ class Plugin(mpp.api.Plugin, mpp.api.MetricPluginMixin, mpp.api.Child, mpp.api.I
         self.declare_metric(self.is_active_code,
                        self.Field('code', int),
                        self.pattern_line,
-                       mpp.api.Marker.T.CODE | mpp.api.Marker.T.STRING,
+                       api.Marker.T.CODE | api.Marker.T.STRING,
                        merge_markers=True)
         self.declare_metric(self.is_active_preprocessor,
                        self.Field('preprocessor', int),
                        self.pattern_line,
-                       mpp.api.Marker.T.PREPROCESSOR)
+                       api.Marker.T.PREPROCESSOR)
         self.declare_metric(self.is_active_comments,
                        self.Field('comments', int),
                        self.pattern_line,
-                       mpp.api.Marker.T.COMMENT)
+                       api.Marker.T.COMMENT)
         self.declare_metric(self.is_active_total,
                        self.Field('total', int),
                        self.pattern_line,
-                       mpp.api.Marker.T.ANY,
+                       api.Marker.T.ANY,
                        merge_markers=True)
 
         super(Plugin, self).initialize(fields=self.get_fields())
 
         if self.is_active() == True:
-            self.subscribe_by_parents_interface(mpp.api.ICode)
+            self.subscribe_by_parents_interface(api.ICode)

+ 15 - 0
metrixpp/ext/std/code/longlines.ini

@@ -0,0 +1,15 @@
+;
+;    Metrix++, Copyright 2009-2019, Metrix++ Project
+;    Link: https://github.com/metrixplusplus/metrixplusplus
+;
+;    This file is a part of Metrix++ Tool.
+;
+
+[Plugin]
+version: 1.1
+package: std.code
+module:  longlines
+class:   Plugin
+depends: None
+actions: collect
+enabled: True

+ 42 - 0
metrixpp/ext/std/code/longlines.py

@@ -0,0 +1,42 @@
+#
+#    Metrix++, Copyright 2009-2019, Metrix++ Project
+#    Link: https://github.com/metrixplusplus/metrixplusplus
+#
+#    This file is a part of Metrix++ Tool.
+#
+
+from metrixpp.mpp import api
+import re
+
+class Plugin(api.Plugin,
+             api.IConfigurable,
+             api.Child,
+             api.MetricPluginMixin):
+
+    def declare_configuration(self, parser):
+        parser.add_option("--std.code.longlines", "--scll",
+            action="store_true", default=False,
+            help="Enables collection of long lines metric [default: %default]")
+        parser.add_option("--std.code.longlines.limit", "--sclll",
+            default=80,
+            help="Modifies the limit for maximum line-length [default: %default]")
+
+    def configure(self, options):
+        self.is_active_ll = options.__dict__['std.code.longlines']
+        self.threshold = int(options.__dict__['std.code.longlines.limit'])
+
+    def initialize(self):
+        pattern_to_search = r'''.{%s,}''' % (self.threshold + 1)
+        self.declare_metric(
+                self.is_active_ll,
+                self.Field('numbers', int, non_zero=True),
+                re.compile(pattern_to_search),
+                marker_type_mask=api.Marker.T.CODE,
+                region_type_mask=api.Region.T.ANY,
+                exclude_subregions=True)
+
+        super(Plugin, self).initialize(fields=self.get_fields())
+
+        if self.is_active_ll == True:
+            self.subscribe_by_parents_interface(api.ICode)
+
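For reference, the metric added above works by matching each code line against a regular expression that requires at least limit + 1 characters. The short sketch below is illustrative only and not part of the commit; it applies the same pattern to two invented sample lines with the default limit of 80.

import re

limit = 80                                    # default of --std.code.longlines.limit
pattern = re.compile(r'.{%s,}' % (limit + 1)) # matches lines with more than `limit` characters

sample_lines = [
    "short_line = 1;",
    "x = compute_something(a, b, c) + compute_something_else(d, e, f) + one_more_call(g, h, i);",
]
# Prints 1: only the second sample line is longer than 80 characters.
print(sum(1 for line in sample_lines if pattern.match(line)))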

+ 2 - 2
ext/std/code/magic.ini

@@ -6,10 +6,10 @@
 ;    
 
 [Plugin]
-version: 1.2
+version: 1.3
 package: std.code
 module:  magic
 class:   Plugin
 depends: None
 actions: collect
-enabled: True
+enabled: True

+ 38 - 14
ext/std/code/magic.py

@@ -5,13 +5,13 @@
 #    This file is a part of Metrix++ Tool.
 #    
 
-import mpp.api
+from metrixpp.mpp import api
 import re
 
-class Plugin(mpp.api.Plugin,
-             mpp.api.IConfigurable,
-             mpp.api.Child,
-             mpp.api.MetricPluginMixin):
+class Plugin(api.Plugin,
+             api.IConfigurable,
+             api.Child,
+             api.MetricPluginMixin):
 
     def declare_configuration(self, parser):
         parser.add_option("--std.code.magic.numbers", "--scmn",
@@ -27,30 +27,54 @@ class Plugin(mpp.api.Plugin,
         self.is_active_numbers_simplier = options.__dict__['std.code.magic.numbers.simplier']
 
     def initialize(self):
+        # C++ Source: https://en.cppreference.com/w/cpp/language/integer_literal
+        # C Source: https://en.cppreference.com/w/c/language/integer_constant
+        cpp_number_patterns = []
+        cpp_number_patterns.append(r'''[1-9]('?[0-9])*''')
+        cpp_number_patterns.append(r'''0('?[0-7])*''')  # 0 is here
+        cpp_number_patterns.append(r'''0[xX][0-9a-fA-F]('?[0-9a-fA-F])*''')
+        cpp_number_patterns.append(r'''0[bB][01]('?[01])*''')
+
+        cpp_number_suffix = r'(ll|LL|[lLzZ])'
+        cpp_number_suffix = r'([uU]?{s}?|{s}[uU])'.format(s=cpp_number_suffix)
+
+        cpp_number_pattern = r'({}){}'.format(r'|'.join(cpp_number_patterns),
+                                              cpp_number_suffix)
+
         pattern_to_search_java = re.compile(
-            r'''((const(\s+[_$a-zA-Z][_$a-zA-Z0-9]*)+\s*[=]\s*)[-+]?[0-9]+\b)|(\b[0-9]+\b)''')
-        pattern_to_search_cpp_cs = re.compile(
-            r'''((const(\s+[_a-zA-Z][_a-zA-Z0-9]*)+\s*[=]\s*)[-+]?[0-9]+\b)|(\b[0-9]+\b)''')
+            r'''((const(\s+[_$a-zA-Z][_$a-zA-Z0-9]*)+\s*[=]\s*)[-+]?[0-9]+\b)'''
+            r'''|(\b[0-9]+\b)''')
+        pattern_to_search_cpp = re.compile(
+            r'''((const(expr)?(\s+[_a-zA-Z][_a-zA-Z0-9]*)+\s*[=]\s*)[-+]?''' +
+            cpp_number_pattern + r'''\b)'''
+            r'''|(virtual\s+.*\s*[=]\s*[0]\s*[,;])'''
+            r'''|(override\s+[=]\s*[0]\s*[,;])'''
+            r'''|(\b''' + cpp_number_pattern + r'''\b)''')
+        pattern_to_search_cs = re.compile(
+            r'''((const(\s+[_a-zA-Z][_a-zA-Z0-9]*)+\s*[=]\s*)[-+]?[0-9]+\b)'''
+            r'''|(\b[0-9]+\b)''')
         self.declare_metric(self.is_active_numbers,
                             self.Field('numbers', int,
                                 non_zero=True),
                             {
                              'std.code.java': (pattern_to_search_java, self.NumbersCounter),
-                             'std.code.cpp': (pattern_to_search_cpp_cs, self.NumbersCounter),
-                             'std.code.cs': (pattern_to_search_cpp_cs, self.NumbersCounter),
+                             'std.code.cpp': (pattern_to_search_cpp, self.NumbersCounter),
+                             'std.code.cs': (pattern_to_search_cs, self.NumbersCounter),
                             },
-                            marker_type_mask=mpp.api.Marker.T.CODE,
-                            region_type_mask=mpp.api.Region.T.ANY)
+                            marker_type_mask=api.Marker.T.CODE,
+                            region_type_mask=api.Region.T.ANY)
 
         super(Plugin, self).initialize(fields=self.get_fields(),
             properties=[self.Property('number.simplier', self.is_active_numbers_simplier)])
 
         if self.is_active() == True:
-            self.subscribe_by_parents_interface(mpp.api.ICode)
+            self.subscribe_by_parents_interface(api.ICode)
 
-    class NumbersCounter(mpp.api.MetricPluginMixin.IterIncrementCounter):
+    class NumbersCounter(api.MetricPluginMixin.IterIncrementCounter):
         def increment(self, match):
             if (match.group(0).startswith('const') or
+                match.group(0).startswith('virtual') or
+                match.group(0).startswith('override') or
                 (self.plugin.is_active_numbers_simplier == True and
                  match.group(0) in ['0', '1', '-1', '+1'])):
                 return 0
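The new C++/C integer-literal pattern above is assembled from four alternatives (decimal with optional digit separators, octal, hexadecimal, binary) plus an optional unsigned/size suffix. As a rough illustration only, not part of the commit, the snippet below rebuilds that pattern and shows which tokens it flags; the sample code string is invented for the demonstration.

import re

# Rebuild the literal pattern the same way std.code.magic does above.
cpp_number_patterns = [
    r"[1-9]('?[0-9])*",                     # decimal, with optional ' separators
    r"0('?[0-7])*",                         # octal (plain 0 included)
    r"0[xX][0-9a-fA-F]('?[0-9a-fA-F])*",    # hexadecimal
    r"0[bB][01]('?[01])*",                  # binary
]
suffix = r"(ll|LL|[lLzZ])"
suffix = r"([uU]?{s}?|{s}[uU])".format(s=suffix)
cpp_number = r"({}){}".format("|".join(cpp_number_patterns), suffix)

# Invented sample input; prints 1'000'000, 0xFFu, 0b1010, 0755 and 42uz.
code = "int a = 1'000'000; auto h = 0xFFu; auto b = 0b1010; int o = 0755; auto n = 42uz;"
for m in re.finditer(r"\b" + cpp_number + r"\b", code):
    print(m.group(0))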

ext/std/code/member.ini → metrixpp/ext/std/code/member.ini


+ 25 - 25
ext/std/code/member.py

@@ -5,13 +5,13 @@
 #    This file is a part of Metrix++ Tool.
 #    
 
-import mpp.api
+from metrixpp.mpp import api
 import re
 
-class Plugin(mpp.api.Plugin,
-             mpp.api.IConfigurable,
-             mpp.api.Child,
-             mpp.api.MetricPluginMixin):
+class Plugin(api.Plugin,
+             api.IConfigurable,
+             api.Child,
+             api.MetricPluginMixin):
 
     def declare_configuration(self, parser):
         parser.add_option("--std.code.member.fields", "--scmf",
@@ -73,9 +73,9 @@ class Plugin(mpp.api.Plugin,
                              'std.code.cpp': pattern_to_search_cpp,
                              'std.code.cs': pattern_to_search_cs,
                             },
-                            marker_type_mask=mpp.api.Marker.T.CODE,
-                            region_type_mask=mpp.api.Region.T.CLASS |
-                            mpp.api.Region.T.STRUCT | mpp.api.Region.T.INTERFACE)
+                            marker_type_mask=api.Marker.T.CODE,
+                            region_type_mask=api.Region.T.CLASS |
+                            api.Region.T.STRUCT | api.Region.T.INTERFACE)
         self.declare_metric(self.is_active_globals,
                             self.Field('globals', int, non_zero=True),
                             {
@@ -83,9 +83,9 @@ class Plugin(mpp.api.Plugin,
                              'std.code.cpp': pattern_to_search_cpp,
                              'std.code.cs': pattern_to_search_cs,
                             },
-                            marker_type_mask=mpp.api.Marker.T.CODE,
-                            region_type_mask=mpp.api.Region.T.GLOBAL |
-                            mpp.api.Region.T.NAMESPACE)
+                            marker_type_mask=api.Marker.T.CODE,
+                            region_type_mask=api.Region.T.GLOBAL |
+                            api.Region.T.NAMESPACE)
         self.declare_metric(self.is_active_classes,
                             self.Field('classes', int, non_zero=True),
                             (None, self.ClassesCounter),
@@ -120,35 +120,35 @@ class Plugin(mpp.api.Plugin,
         super(Plugin, self).initialize(fields=self.get_fields())
 
         if self.is_active() == True:
-            self.subscribe_by_parents_interface(mpp.api.ICode)
+            self.subscribe_by_parents_interface(api.ICode)
 
-    class ClassesCounter(mpp.api.MetricPluginMixin.PlainCounter):
+    class ClassesCounter(api.MetricPluginMixin.PlainCounter):
         def count(self, marker, pattern_to_search):
             self.result = sum(1 for unused in self.data.iterate_regions(
-                filter_group=mpp.api.Region.T.CLASS, region_id=self.region.get_id()))
+                filter_group=api.Region.T.CLASS, region_id=self.region.get_id()))
 
-    class StructCounter(mpp.api.MetricPluginMixin.PlainCounter):
+    class StructCounter(api.MetricPluginMixin.PlainCounter):
         def count(self, marker, pattern_to_search):
             self.result = sum(1 for unused in self.data.iterate_regions(
-                filter_group=mpp.api.Region.T.STRUCT, region_id=self.region.get_id()))
+                filter_group=api.Region.T.STRUCT, region_id=self.region.get_id()))
 
-    class InterfaceCounter(mpp.api.MetricPluginMixin.PlainCounter):
+    class InterfaceCounter(api.MetricPluginMixin.PlainCounter):
         def count(self, marker, pattern_to_search):
             self.result = sum(1 for unused in self.data.iterate_regions(
-                filter_group=mpp.api.Region.T.INTERFACE, region_id=self.region.get_id()))
+                filter_group=api.Region.T.INTERFACE, region_id=self.region.get_id()))
 
-    class TypeCounter(mpp.api.MetricPluginMixin.PlainCounter):
+    class TypeCounter(api.MetricPluginMixin.PlainCounter):
         def count(self, marker, pattern_to_search):
             self.result = sum(1 for unused in self.data.iterate_regions(
-                filter_group=mpp.api.Region.T.CLASS | mpp.api.Region.T.STRUCT |
-                 mpp.api.Region.T.INTERFACE, region_id=self.region.get_id()))
+                filter_group=api.Region.T.CLASS | api.Region.T.STRUCT |
+                 api.Region.T.INTERFACE, region_id=self.region.get_id()))
 
-    class MethodCounter(mpp.api.MetricPluginMixin.PlainCounter):
+    class MethodCounter(api.MetricPluginMixin.PlainCounter):
         def count(self, marker, pattern_to_search):
             self.result = sum(1 for unused in self.data.iterate_regions(
-                filter_group=mpp.api.Region.T.FUNCTION, region_id=self.region.get_id()))
+                filter_group=api.Region.T.FUNCTION, region_id=self.region.get_id()))
 
-    class NamespaceCounter(mpp.api.MetricPluginMixin.PlainCounter):
+    class NamespaceCounter(api.MetricPluginMixin.PlainCounter):
         def count(self, marker, pattern_to_search):
             self.result = sum(1 for unused in self.data.iterate_regions(
-                filter_group=mpp.api.Region.T.NAMESPACE, region_id=self.region.get_id()))
+                filter_group=api.Region.T.NAMESPACE, region_id=self.region.get_id()))
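The region_type_mask arguments above combine several region kinds with bitwise OR, so one counter can accept classes, structs and interfaces while rejecting everything else. The flag values in the sketch below are hypothetical stand-ins, not the real api.Region.T constants.

# Hypothetical flag values for illustration only.
CLASS, STRUCT, INTERFACE, FUNCTION = 0x01, 0x02, 0x04, 0x08

mask = CLASS | STRUCT | INTERFACE
print(bool(mask & STRUCT))     # True  -- struct regions pass the filter
print(bool(mask & FUNCTION))   # False -- function regions are filtered out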

ext/std/code/mi.ini → metrixpp/ext/std/code/mi.ini


+ 8 - 8
ext/std/code/mi.py

@@ -5,12 +5,12 @@
 #    This file is a part of Metrix++ Tool.
 #    
 
-import mpp.api
+from metrixpp.mpp import api
 
-class Plugin(mpp.api.Plugin,
-             mpp.api.IConfigurable,
-             mpp.api.Child,
-             mpp.api.MetricPluginMixin):
+class Plugin(api.Plugin,
+             api.IConfigurable,
+             api.Child,
+             api.MetricPluginMixin):
 
     def declare_configuration(self, parser):
         self.parser = parser
@@ -39,7 +39,7 @@ class Plugin(mpp.api.Plugin,
                              'std.code.lines':(None, self.RankedLinesCounter),
                             },
                             # set none, because this plugin is not interested in parsing the code
-                            marker_type_mask=mpp.api.Marker.T.NONE)
+                            marker_type_mask=api.Marker.T.NONE)
 
         super(Plugin, self).initialize(fields=self.get_fields())
 
@@ -47,10 +47,10 @@ class Plugin(mpp.api.Plugin,
             self.subscribe_by_parents_name('std.code.complexity')
             self.subscribe_by_parents_name('std.code.lines')
 
-    class RankedComplexityCounter(mpp.api.MetricPluginMixin.RankedCounter):
+    class RankedComplexityCounter(api.MetricPluginMixin.RankedCounter):
         rank_source = ('std.code.complexity', 'cyclomatic')
         rank_ranges = [(None, 7), (8, 11), (12, 19), (20, 49), (50, None)]
 
-    class RankedLinesCounter(mpp.api.MetricPluginMixin.RankedCounter):
+    class RankedLinesCounter(api.MetricPluginMixin.RankedCounter):
         rank_source = ('std.code.lines', 'code')
         rank_ranges = [(None, 124), (125, 249), (250, 499), (500, 999), (1000, None)]
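The ranked counters above translate a source metric value into a rank by checking which of the configured ranges it falls into. The helper below is a rough sketch of that lookup, not the actual RankedCounter implementation, and the 1-based rank convention is an assumption made for the illustration.

def rank_for(value, rank_ranges):
    # Return the 1-based index of the range (low, high) containing value;
    # None bounds are treated as open-ended.
    for rank, (low, high) in enumerate(rank_ranges, start=1):
        if (low is None or value >= low) and (high is None or value <= high):
            return rank
    return None

cyclomatic_ranges = [(None, 7), (8, 11), (12, 19), (20, 49), (50, None)]
print(rank_for(5, cyclomatic_ranges))    # 1
print(rank_for(25, cyclomatic_ranges))   # 4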

+ 15 - 0
metrixpp/ext/std/code/ratio.ini

@@ -0,0 +1,15 @@
+;
+;    Metrix++, Copyright 2009-2019, Metrix++ Project
+;    Link: https://github.com/metrixplusplus/metrixplusplus
+;    
+;    This file is a part of Metrix++ Tool.
+;    
+
+[Plugin]
+version: 1.0
+package: std.code
+module:  ratio
+class:   Plugin
+depends: std.code.lines
+actions: collect
+enabled: True

+ 49 - 0
metrixpp/ext/std/code/ratio.py

@@ -0,0 +1,49 @@
+#
+#    Metrix++, Copyright 2009-2019, Metrix++ Project
+#    Link: https://github.com/metrixplusplus/metrixplusplus
+#    
+#    This file is a part of Metrix++ Tool.
+#    
+
+from metrixpp.mpp import api
+
+class Plugin(api.Plugin,
+             api.IConfigurable,
+             api.Child,
+             api.MetricPluginMixin):
+    
+    def declare_configuration(self, parser):
+        self.parser = parser
+        parser.add_option("--std.code.ratio.comments", "--scrc", action="store_true", default=False,
+                         help="Enables collection of comment ratio metric (per region detalization) - "
+                         "ratio of non-empty lines of comments to non-empty lines of (code + comments)."
+                         " It uses std.code.lines.code, std.code.lines.comments"
+                         " metrics to calculate the ratio."
+                         " [default: %default]")
+
+    def configure(self, options):
+        self.is_active_ratiocomments = options.__dict__['std.code.ratio.comments']
+        if self.is_active_ratiocomments == True:
+            required_opts = ['std.code.lines.comments', 'std.code.lines.code']
+            for each in required_opts:
+                if options.__dict__[each] == False:
+                    self.parser.error('option --std.code.ratio.comments: requires --{0} option'.
+                                      format(each))
+
+    def initialize(self):
+        self.declare_metric(self.is_active_ratiocomments,
+                            self.Field('comments', float),
+                            {
+                             'std.code.lines':(None, self.RatioCalculatorCounter)
+                            },
+                            # set none, because this plugin is not interested in parsing the code
+                            marker_type_mask=api.Marker.T.NONE)
+
+        super(Plugin, self).initialize(fields=self.get_fields())
+
+        if self.is_active() == True:
+            self.subscribe_by_parents_name('std.code.lines')
+
+    class RatioCalculatorCounter(api.MetricPluginMixin.RatioCalculator):
+        ratio_comments = ('std.code.lines', 'comments')
+        ratio_code = ('std.code.lines', 'code')
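The option help above defines the metric as the ratio of non-empty comment lines to non-empty lines of code plus comments, taken from the std.code.lines counters. The arithmetic is illustrated below with invented numbers; the sketch is not part of the commit and the helper name is made up.

def comment_ratio(lines_code, lines_comments):
    # ratio of comments to (code + comments), as described in the option help
    total = lines_code + lines_comments
    return float(lines_comments) / total if total else 0.0

print(comment_ratio(lines_code=120, lines_comments=30))   # 0.2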

ext/std/code/test.ini → metrixpp/ext/std/code/test.ini


+ 8 - 8
ext/std/code/test.py

@@ -5,16 +5,16 @@
 #    This file is a part of Metrix++ Tool.
 #    
 
-import mpp.api
+from metrixpp.mpp import api
 import logging
 
 # used for testing and development purposes
-class Plugin(mpp.api.Plugin, mpp.api.Child):
+class Plugin(api.Plugin, api.Child):
 
     def initialize(self):
         return
         # do not trigger version property set, it is a module for testing purposes
-        self.subscribe_by_parents_interface(mpp.api.ICode)
+        self.subscribe_by_parents_interface(api.ICode)
 
     def callback(self, parent, data, is_updated):
 
@@ -23,9 +23,9 @@ class Plugin(mpp.api.Plugin, mpp.api.Child):
         for region in data.iterate_regions():
             logging.warn(region.get_name() + " " + str(region.get_cursor()))
             for marker in data.iterate_markers(region_id=region.get_id(),
-                                               filter_group = mpp.api.Marker.T.ANY,
+                                               filter_group = api.Marker.T.ANY,
                                                exclude_children = True):
-                logging.warn("\tMarker: " + mpp.api.Marker.T().to_str(marker.get_type()) +
+                logging.warn("\tMarker: " + api.Marker.T().to_str(marker.get_type()) +
                              " " + str(marker.get_offset_begin()) + " " + str(marker.get_offset_end()) +
                              " >>>" + text[marker.get_offset_begin():marker.get_offset_end()] + "<<<")
                 text_comb += text[marker.get_offset_begin():marker.get_offset_end()]
@@ -33,9 +33,9 @@ class Plugin(mpp.api.Plugin, mpp.api.Child):
 
         text_comb = ""
         for marker in data.iterate_markers(region_id=1,
-                                           filter_group = mpp.api.Marker.T.ANY,
+                                           filter_group = api.Marker.T.ANY,
                                            exclude_children = False):
-            logging.warn("\tMarker: " + mpp.api.Marker.T().to_str(marker.get_type()) +
+            logging.warn("\tMarker: " + api.Marker.T().to_str(marker.get_type()) +
                          " " + str(marker.get_offset_begin()) + " " + str(marker.get_offset_end()) +
                          " >>>" + text[marker.get_offset_begin():marker.get_offset_end()] + "<<<")
             text_comb += text[marker.get_offset_begin():marker.get_offset_end()]
@@ -45,7 +45,7 @@ class Plugin(mpp.api.Plugin, mpp.api.Child):
         for region in data.iterate_regions():
             logging.warn(region.get_name() + " " + str(region.get_cursor()))
             for marker in data.iterate_markers(region_id=region.get_id(),
-                                               filter_group = mpp.api.Marker.T.ANY,
+                                               filter_group = api.Marker.T.ANY,
                                                exclude_children = True,
                                                merge = True):
                 logging.warn("\tMarker: merged" + 

ext/std/code/todo.ini → metrixpp/ext/std/code/todo.ini


+ 10 - 10
ext/std/code/todo.py

@@ -5,13 +5,13 @@
 #    This file is a part of Metrix++ Tool.
 #    
 
-import mpp.api
+from metrixpp.mpp import api
 import re
 
-class Plugin(mpp.api.Plugin,
-             mpp.api.IConfigurable,
-             mpp.api.Child,
-             mpp.api.MetricPluginMixin):
+class Plugin(api.Plugin,
+             api.IConfigurable,
+             api.Child,
+             api.MetricPluginMixin):
 
     def declare_configuration(self, parser):
         self.parser = parser
@@ -42,16 +42,16 @@ class Plugin(mpp.api.Plugin,
         self.declare_metric(self.is_active_comments,
                             self.Field('comments', int, non_zero=True),
                             self.pattern_to_search,
-                            marker_type_mask=mpp.api.Marker.T.COMMENT,
-                            region_type_mask=mpp.api.Region.T.ANY)
+                            marker_type_mask=api.Marker.T.COMMENT,
+                            region_type_mask=api.Region.T.ANY)
         self.declare_metric(self.is_active_strings,
                             self.Field('strings', int, non_zero=True),
                             self.pattern_to_search,
-                            marker_type_mask=mpp.api.Marker.T.STRING,
-                            region_type_mask=mpp.api.Region.T.ANY)
+                            marker_type_mask=api.Marker.T.STRING,
+                            region_type_mask=api.Region.T.ANY)
 
         super(Plugin, self).initialize(fields=self.get_fields(),
             properties=[self.Property('tags', ','.join(self.tags_list))])
 
         if self.is_active() == True:
-            self.subscribe_by_parents_interface(mpp.api.ICode)
+            self.subscribe_by_parents_interface(api.ICode)

ext/std/suppress.ini → metrixpp/ext/std/suppress.ini


+ 16 - 16
ext/std/suppress.py

@@ -5,12 +5,12 @@
 #    This file is a part of Metrix++ Tool.
 #    
 
-import mpp.api
-import mpp.cout
+from metrixpp.mpp import api
+from metrixpp.mpp import cout
 
 import re
 
-class Plugin(mpp.api.Plugin, mpp.api.Child, mpp.api.IConfigurable):
+class Plugin(api.Plugin, api.Child, api.IConfigurable):
 
     def declare_configuration(self, parser):
         parser.add_option("--std.suppress", "--ss", action="store_true", default=False,
@@ -30,15 +30,15 @@ class Plugin(mpp.api.Plugin, mpp.api.Child, mpp.api.IConfigurable):
             fields.append(self.Field('count', int, non_zero=True))
             fields.append(self.Field('list', str))
         # - init per regions table
-        mpp.api.Plugin.initialize(self, fields=fields)
+        api.Plugin.initialize(self, fields=fields)
         # - init per file table
-        mpp.api.Plugin.initialize(self,
+        api.Plugin.initialize(self,
                                    namespace = self.get_name() + '.file',
                                    support_regions = False,
                                    fields=fields)
 
         if len(fields) != 0:
-            self.subscribe_by_parents_interface(mpp.api.ICode)
+            self.subscribe_by_parents_interface(api.ICode)
 
     # suppress pattern
     pattern = re.compile(r'''metrix[+][+][:][ \t]+suppress[ \t]+([^ \t\r\n\*]+)''')
@@ -54,7 +54,7 @@ class Plugin(mpp.api.Plugin, mpp.api.Child, mpp.api.IConfigurable):
                 list_text = []
                 last_comment_end = None
                 for marker in data.iterate_markers(
-                                filter_group = mpp.api.Marker.T.COMMENT,
+                                filter_group = api.Marker.T.COMMENT,
                                 region_id = region.get_id(),
                                 exclude_children = True):
 
@@ -67,11 +67,11 @@ class Plugin(mpp.api.Plugin, mpp.api.Child, mpp.api.IConfigurable):
                     matches = self.pattern.findall(text, marker.get_offset_begin(), marker.get_offset_end())
                     for m in matches:
                         namespace_name, field = m.split(':')
-                        db_loader = self.get_plugin('mpp.dbf').get_loader()
+                        db_loader = self.get_plugin('metrixpp.mpp.dbf').get_loader()
                         namespace = db_loader.get_namespace(namespace_name)
                         if namespace == None or namespace.check_field(field) == False:
-                            mpp.cout.notify(data.get_path(), region.get_cursor(),
-                                                  mpp.cout.SEVERITY_WARNING,
+                            cout.notify(data.get_path(), region.get_cursor(),
+                                                  cout.SEVERITY_WARNING,
                                                   "Suppressed metric '" + namespace_name + ":" + field +
                                                     "' is not being collected",
                                                   [("Metric name", namespace_name + ":" + field),
@@ -79,8 +79,8 @@ class Plugin(mpp.api.Plugin, mpp.api.Child, mpp.api.IConfigurable):
                             continue
                         if namespace.are_regions_supported() == False:
                             if region.get_id() != 1:
-                                mpp.cout.notify(data.get_path(), region.get_cursor(),
-                                                  mpp.cout.SEVERITY_WARNING,
+                                cout.notify(data.get_path(), region.get_cursor(),
+                                                  cout.SEVERITY_WARNING,
                                                   "Suppressed metric '" + namespace_name + ":" + field +
                                                     "' is attributed to a file, not a region. "
                                                     "Remove it or move to the beginning of the file.",
@@ -89,8 +89,8 @@ class Plugin(mpp.api.Plugin, mpp.api.Child, mpp.api.IConfigurable):
                                 continue
 
                             if m in file_list_text:
-                                mpp.cout.notify(data.get_path(), region.get_cursor(),
-                                              mpp.cout.SEVERITY_WARNING,
+                                cout.notify(data.get_path(), region.get_cursor(),
+                                              cout.SEVERITY_WARNING,
                                               "Duplicate suppression of the metric '" +
                                                namespace_name + ":" + field + "'",
                                               [("Metric name", namespace_name + ":" + field),
@@ -102,8 +102,8 @@ class Plugin(mpp.api.Plugin, mpp.api.Child, mpp.api.IConfigurable):
                             continue
 
                         if m in list_text:
-                            mpp.cout.notify(data.get_path(), region.get_cursor(),
-                                          mpp.cout.SEVERITY_WARNING,
+                            cout.notify(data.get_path(), region.get_cursor(),
+                                          cout.SEVERITY_WARNING,
                                           "Duplicate suppression of the metric '" +
                                            namespace_name + ":" + field + "'",
                                           [("Metric name", namespace_name + ":" + field),
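The suppression pattern shown in the context above looks for comments of the form "metrix++: suppress <namespace>:<field>". The snippet below, illustrative only and not part of the commit, runs that pattern against an invented sample comment to show how the namespace and field are split out.

import re

pattern = re.compile(r'''metrix[+][+][:][ \t]+suppress[ \t]+([^ \t\r\n\*]+)''')
comment = "// metrix++: suppress std.code.complexity:cyclomatic"
for m in pattern.findall(comment):
    namespace_name, field = m.split(':')
    print(namespace_name, field)   # std.code.complexity cyclomatic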

ext/std/tools/__init__.py → metrixpp/ext/std/tools/__init__.py


ext/std/tools/collect.ini → metrixpp/ext/std/tools/collect.ini


+ 386 - 0
metrixpp/ext/std/tools/collect.py

@@ -0,0 +1,386 @@
+#
+#    Metrix++, Copyright 2009-2019, Metrix++ Project
+#    Link: https://github.com/metrixplusplus/metrixplusplus
+#
+#    This file is a part of Metrix++ Tool.
+#
+
+
+from metrixpp.mpp import api
+
+import re
+import os
+import sys
+import logging
+import time
+import binascii
+import fnmatch
+import multiprocessing.pool
+
+class Plugin(api.Plugin, api.Parent, api.IConfigurable, api.IRunable):
+
+    def __init__(self):
+        self.reader = DirectoryReader()
+        self.include_rules = []
+        self.exclude_rules = []
+        self.exclude_files = []
+        self.parsers       = []
+        super(Plugin, self).__init__()
+
+    def declare_configuration(self, parser):
+        parser.add_option("--std.general.proctime", "--sgpt", action="store_true", default=False,
+                         help="If the option is set (True), the tool measures processing time per file [default: %default]")
+        parser.add_option("--std.general.procerrors", "--sgpe", action="store_true", default=False,
+                         help="If the option is set (True), the tool counts number of processing/parsing errors per file [default: %default]")
+        parser.add_option("--std.general.size", "--sgs", action="store_true", default=False,
+                         help="If the option is set (True), the tool collects file size metric (in bytes) [default: %default]")
+        parser.add_option("--include-files", "--if", action='append',
+                         help="Adds a regular expression pattern to include files in processing (files have to match any rule to be included)")
+        parser.add_option("--exclude-files", "--ef", action='append',
+                         help="Adds a regular expression pattern to exclude files or directories from processing")
+        parser.add_option("--non-recursively", "--nr", action="store_true", default=False,
+                         help="If the option is set (True), sub-directories are not processed [default: %default]")
+        self.optparser = parser
+
+    def configure(self, options):
+        self.is_proctime_enabled = options.__dict__['std.general.proctime']
+        self.is_procerrors_enabled = options.__dict__['std.general.procerrors']
+        self.is_size_enabled = options.__dict__['std.general.size']
+        # check if any include rule is given
+        if options.__dict__['include_files']:
+            try:
+                for include_rule in options.__dict__['include_files']:
+                    self.add_include_rule(re.compile(include_rule))
+            except Exception as e:
+                self.optparser.error("option --include-files: " + str(e))
+        else:
+            self.add_include_rule(re.compile(r'.*'))
+
+        # check if any exclude rule is given
+        if options.__dict__['exclude_files']:
+            try:
+                for exclude_rule in options.__dict__['exclude_files']:
+                    self.add_exclude_rule(re.compile(exclude_rule))
+            except Exception as e:
+                self.optparser.error("option --exclude-files: " + str(e))
+        else:
+            self.add_exclude_rule(re.compile(r'^[.]'))
+        self.non_recursively = options.__dict__['non_recursively']
+
+    def initialize(self):
+        fields = []
+        if self.is_proctime_enabled == True:
+            fields.append(self.Field('proctime', float))
+        if self.is_procerrors_enabled == True:
+            fields.append(self.Field('procerrors', int))
+        if self.is_size_enabled == True:
+            fields.append(self.Field('size', int))
+        super(Plugin, self).initialize(namespace='std.general', support_regions=False, fields=fields)
+        self.add_exclude_file(self.get_plugin('metrixpp.mpp.dbf').get_dbfile_path())
+        self.add_exclude_file(self.get_plugin('metrixpp.mpp.dbf').get_dbfile_prev_path())
+
+    def run(self, args):
+        if len(args) == 0:
+            return self.reader.run(self, "./")
+        retcode = 0
+        for directory in args:
+            retcode += self.reader.run(self, directory)
+        return retcode
+
+    def register_parser(self, fnmatch_exp_list, parser):
+        self.parsers.append((fnmatch_exp_list, parser))
+
+    def get_parser(self, file_path):
+        for parser in self.parsers:
+            for fnmatch_exp in parser[0]:
+                if fnmatch.fnmatch(file_path, fnmatch_exp):
+                    return parser[1]
+        return None
+
+    def add_include_rule(self, re_compiled_pattern):
+        self.include_rules.append(re_compiled_pattern)
+
+    def add_exclude_rule(self, re_compiled_pattern):
+        self.exclude_rules.append(re_compiled_pattern)
+
+    def add_exclude_file(self, file_path):
+        if file_path == None:
+            return
+        self.exclude_files.append(file_path)
+
+    def is_file_excluded(self, file_name):
+        # only apply the include rules to files - skip directories
+        if os.path.isfile(file_name):
+            for each in self.include_rules:
+                if re.match(each, os.path.basename(file_name)) != None:
+                    break;
+            # file is excluded if no include rule matches
+            else:
+                return True
+        # check exclude rules for both files and directories
+        for each in self.exclude_rules:
+            if re.match(each, os.path.basename(file_name)) != None:
+                return True
+        # finally check if a file is excluded directly
+        for each in self.exclude_files:
+            if os.path.basename(each) == os.path.basename(file_name):
+                if os.stat(each) == os.stat(file_name):
+                    return True
+        return False
+
+class DirectoryReader():
+
+    def readtextfile(self,filename):
+        """ Read a text file and try to detect the coding
+
+            Since we examine program code text files we can assume the following:
+            - There are no NUL characters, i.e. no 0x00 sequences of 1, 2 or 4
+              byte, starting on 1, 2 or 4 byte boundaries (depending on
+              1, 2 or 4 byte coding)
+            - There should be at least one space (ASCII 0x20) char
+              of the respective length (1, 2 or 4 byte)
+            - Program code consists of only ASCII chars, i.e. code < 128
+            - Non ASCII chars should appear in string literals and comments only
+
+            Especially in the case of an 8 bit coding it does not matter
+            which code page to use: metric analysis is done on program code,
+            which is pure ASCII; string literals and comments are only recognized
+            as such but not interpreted, so it does not matter which code page
+            is used if they contain non-ASCII chars.
+
+            Note the decoder's different behavior for the "utf_nn" identifiers:
+            - .decode("utf_32") / .decode("utf_16"):       preceding BOM is skipped
+            - with suffix ".._be" or ".._le" respectively: preceding BOM is preserved
+            but
+            - .decode("utf_8"):     preceding BOM is preserved
+            - .decode("utf_8_sig"): preceding BOM is skipped
+        """
+        # Methods to check for various UTF variants without BOM:
+        # Since UTF16/32 codings are recommended to use a BOM these methods
+        # shouldn't be necessary but may be useful in certain cases.
+        def checkforUTF32_BE(a):
+            if ( (len(a) % 4) != 0 ): return False
+            n = a.find(b'\x00\x00\x00\x20')
+            return (n >= 0) and ((n % 4) == 0)
+        def checkforUTF32_LE(a):
+            if ( (len(a) % 4) != 0 ): return False
+            n = a.find(b'\x20\x00\x00\x00')
+            return (n >= 0) and ((n % 4) == 0)
+        def checkforUTF16_BE(a):
+            if ( (len(a) % 2) != 0 ): return False
+            n = a.find(b'\x00\x20')
+            return (n >= 0) and ((n % 2) == 0)
+        def checkforUTF16_LE(a):
+            if ( (len(a) % 2) != 0 ): return False
+            n = a.find(b'\x20\x00')
+            return (n >= 0) and ((n % 2) == 0)
+
+        # Method to check for UTF8 without BOM:
+        # "a" is the textfile represented as a simple byte array!
+        # Find first char with code > 127:
+        #
+        # 1 nothing found: all bytes 0..127; in this case "a" only consists
+        #   of ASCII chars but this may also be treated as valid UTF8 coding
+        #
+        # 2 Code is a valid UTF8 leading byte: 192..243
+        #   then check subsequent bytes to be UTF8 extension bytes: 128..191
+        #   Does also do some additional plausibility checks:
+        #   If a valid UTF8 byte sequence is found
+        #   - the subsequent byte (after the UTF8 sequence) must be an ASCII
+        #   - or another UTF8 leading byte (in the latter case we assume that there
+        #     are following the appropriate number of UTF8 extension bytes..)
+        #   Note that these checks don't guarantee the text is really UTF8 encoded:
+        #   If a valid UTF8 sequence is found but in fact the text is some sort
+        #   of 8 bit OEM coding this may be coincidentally a sequence of 8 bit
+        #   OEM chars. This indeed seems very unlikely but may happen...
+        #   Even if the whole text were examined for UTF8 sequences: every
+        #   valid UTF8 sequence found may also be a sequence of OEM chars!
+        #
+        # 3 Code is not a valid UTF8 leading byte: 128..191 or 244..255
+        #   In this case coding is some sort of 8 bit OEM coding. Since we don't
+        #   know the OEM code page the file was written with, we assume "latin_1"
+        #   (this is mostly the same as ANSI, but "ansi" isn't available on Python 2)
+        #
+        # return  suggested text coding: "ascii","utf_8" or "latin_1" (resp. default)
+        def checkforUTF8(a,default="latin_1"):
+
+            # Since "a" is a string array on Python 2 we use a special ORD function:
+            # Convert c to its byte representation if it is a character
+            # Works for Python 2+3
+            def ORD(c): return ord(c) if (type(c) == str) else c
+
+            L = len(a)
+            n = 0
+            while ( (n < L) and (ORD(a[n]) < 128) ): # (a[n] < ExtASCII) ):
+                n = n+1
+            if ( n >= L ):                          # all chars < 128: ASCII coding
+                return "ascii"                      # but may also be treated as UTF8!
+            w = a[n]
+
+            # UTF8 two byte sequence: leading byte + 1 extension byte
+            if ORD(w) in range(192,224):
+                if ( (n+1 < L)
+                 and (ORD(a[n+1]) in range(128,192))     # valid UTF8 extension byte
+                ):
+                    if ((n+2 == L)                  # w is last character
+                     or (ORD(a[n+2]) < 128)              # or next byte is an ASCII char
+                     or (ORD(a[n+2]) in range(192,244))  # or next byte is an UTF8 leading byte
+                    ):
+                        return "utf_8"
+                return default
+
+            # UTF8 three byte sequence: leading byte + 2 extension bytes
+            if ORD(w) in range(224,240):
+                if ( (n+2 < L)
+                 and (ORD(a[n+1]) in range(128,192))     # 2 valid UTF8 extension bytes
+                 and (ORD(a[n+2]) in range(128,192))
+                ):
+                    if ((n+3 == L)                  # w is last character
+                     or (ORD(a[n+3]) < 128)              # or next byte is ASCII char
+                     or (ORD(a[n+3]) in range(192,244))  # or next byte is UTF8 leading byte
+                    ):
+                        return "utf_8"
+                return default
+
+            # UTF8 four byte sequence: leading byte + 3 extension bytes
+            if ORD(w) in range(240,244):
+                if ( (n+3 < L)
+                 and (ORD(a[n+1]) in range(128,192))     # 3 valid UTF8 extension bytes
+                 and (ORD(a[n+2]) in range(128,192))
+                 and (ORD(a[n+3]) in range(128,192))
+                ):
+                    if ((n+4 == L)                  # w is last character
+                     or (ORD(a[n+4]) < 128)              # or next byte is ASCII char
+                     or (ORD(a[n+4]) in range(192,244))  # or next byte is UTF8 leading byte
+                    ):
+                        return "utf_8"
+                return default
+
+            # no valid UTF8 byte sequence:
+            return default;
+          # end of checkforUTF8 ------------------------------------------------
+
+        # ----------------------------------------------------------------------
+        # Subroutine readtextfile
+        # open as binary and try to guess the encoding
+        # attention:
+        # - Python 3: "a" is a binary array
+        # - Python 2: "a" is a string array!
+        # ----------------------------------------------------------------------
+        f = open(filename, 'rb')
+        a = f.read()
+        f.close()
+
+        # check for codings with BOM:
+        # Consider the order: Check for UTF32 first!
+        if  (a.startswith(b'\xff\xfe\x00\x00')
+          or a.startswith(b'\x00\x00\xfe\xff')):
+            coding = "utf_32"       # no suffix _be/_le --> decoder skips the BOM
+        elif (a.startswith(b'\xff\xfe')
+           or a.startswith(b'\xfe\xff')):
+            coding = "utf_16"       # no suffix _be/_le --> decoder skips the BOM
+        elif a.startswith(b'\xef\xbb\xbf'):
+            coding = "utf_8_sig"
+
+        # elif: there are some other codings with BOM - feel free to add them here
+
+        # check for UTF variants without BOM:
+        # Consider the order: Check for UTF32 first!
+        elif checkforUTF32_BE(a):
+            coding = "utf_32_be"
+        elif checkforUTF32_LE(a):
+            coding = "utf_32_le"
+        elif checkforUTF16_BE(a):
+            coding = "utf_16_be"
+        elif checkforUTF16_LE(a):
+            coding = "utf_16_le"
+
+        # So finally we only have to look for UTF8 without BOM:
+        else:
+            coding = checkforUTF8(a)
+
+        # decode to text with the found coding; since our guess may be wrong
+        # we replace unknown chars to avoid errors. Because we examine program code
+        # files (i.e. true program code should only consist of ASCII chars), these
+        # replacements should only affect string literals and comments and should
+        # have no effect on metric analysis.
+        text = a.decode(coding,'replace')
+
+        # Finally replace possible line break variants with \n:
+        # todo: replace with a regex
+        text = text.replace("\r\n","\n")
+        text = text.replace("\r","\n")
+
+        return text
+
+        # end of readtextfile --------------------------------------------------
+
+    def run(self, plugin, directory):
+
+        IS_TEST_MODE = False
+        if 'METRIXPLUSPLUS_TEST_MODE' in list(os.environ.keys()):
+            IS_TEST_MODE = True
+
+        def run_per_file(plugin, fname, full_path):
+            exit_code = 0
+            norm_path = re.sub(r'''[\\]''', "/", full_path)
+            if os.path.isabs(norm_path) == False and norm_path.startswith('./') == False:
+                norm_path = './' + norm_path
+            if plugin.is_file_excluded(norm_path) == False:
+                if os.path.isdir(full_path):
+                    if plugin.non_recursively == False:
+                        exit_code += run_recursively(plugin, full_path)
+                else:
+                    parser = plugin.get_parser(full_path)
+                    if parser == None:
+                        logging.info("Skipping: " + norm_path)
+                    else:
+                        logging.info("Processing: " + norm_path)
+                        ts = time.time()
+
+                        text = self.readtextfile(full_path)
+                        #text = self.readfile_org(full_path)
+                        checksum = binascii.crc32(text.encode('utf8')) & 0xffffffff # to match python 3
+
+                        db_loader = plugin.get_plugin('metrixpp.mpp.dbf').get_loader()
+                        (data, is_updated) = db_loader.create_file_data(norm_path, checksum, text)
+                        procerrors = parser.process(plugin, data, is_updated)
+                        if plugin.is_proctime_enabled == True:
+                            data.set_data('std.general', 'proctime',
+                                          (time.time() - ts) if IS_TEST_MODE == False else 0.01)
+                        if plugin.is_procerrors_enabled == True and procerrors != None and procerrors != 0:
+                            data.set_data('std.general', 'procerrors', procerrors)
+                        if plugin.is_size_enabled == True:
+                            data.set_data('std.general', 'size', len(text))
+                        db_loader.save_file_data(data)
+                        #logging.debug("-" * 60)
+                        exit_code += procerrors
+            else:
+                logging.info("Excluding: " + norm_path)
+            return exit_code
+
+
+        #thread_pool = multiprocessing.pool.ThreadPool()
+        #def mp_worker(args):
+        #    run_per_file(args[0], args[1], args[2])
+        def run_recursively(plugin, directory):
+            exit_code = 0
+            #thread_pool.map(mp_worker,
+            #    [(plugin, f, os.path.join(subdir, f))
+            #        for subdir, dirs, files in os.walk(directory) for f in files])
+            for fname in sorted(os.listdir(directory)):
+                full_path = os.path.join(directory, fname)
+                exit_code += run_per_file(plugin, fname, full_path)
+
+            return exit_code
+
+        if os.path.exists(directory) == False:
+            logging.error("Skipping (does not exist): " + directory)
+            return 1
+
+        if os.path.isdir(directory):
+            total_errors = run_recursively(plugin, directory)
+        else:
+            total_errors = run_per_file(plugin, os.path.basename(directory), directory)
+        total_errors = total_errors # used, warnings are per file if not zero
+        return 0 # ignore errors, collection is successful anyway

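A note on the encoding detection above: the BOM tests in readtextfile() must be ordered UTF-32 before UTF-16, because the UTF-16 LE mark (FF FE) is a prefix of the UTF-32 LE mark (FF FE 00 00). A minimal standalone sketch of that ordering and of the codec names used above (the sample byte strings are invented and this is not part of the tool):

# Standalone illustration of the BOM ordering used by readtextfile();
# the byte strings below are invented samples, not project data.
samples = [
    (b'\xff\xfe\x00\x00A\x00\x00\x00', 'utf_32'),     # UTF-32 LE BOM must be tested first
    (b'\xff\xfeA\x00',                 'utf_16'),     # otherwise the UTF-16 check would match it too
    (b'\xef\xbb\xbfabc',               'utf_8_sig'),  # UTF-8 BOM is skipped by the *_sig codec
]
for raw, coding in samples:
    print(coding, repr(raw.decode(coding, 'replace')))
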
+ 1 - 1
ext/std/tools/export.ini

@@ -10,6 +10,6 @@ version: 1.0
 package: std.tools
 module:  export
 class:   Plugin
-depends: mpp.dbf
+depends: metrixpp.mpp.dbf
 actions: export
 enabled: True

+ 9 - 9
ext/std/tools/export.py

@@ -5,16 +5,16 @@
 #    This file is a part of Metrix++ Tool.
 #    
 
-import mpp.api
-import mpp.utils
+from metrixpp.mpp import api
+from metrixpp.mpp import utils
 
 import csv
 
-class Plugin(mpp.api.Plugin, mpp.api.IRunable):
+class Plugin(api.Plugin, api.IRunable):
 
     def run(self, args):
-        self.loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
-        self.loader = self.get_plugin('mpp.dbf').get_loader()
+        self.loader_prev = self.get_plugin('metrixpp.mpp.dbf').get_loader_prev()
+        self.loader = self.get_plugin('metrixpp.mpp.dbf').get_loader()
     
         paths = None
         if len(args) == 0:
@@ -44,11 +44,11 @@ class Plugin(mpp.api.Plugin, mpp.api.IRunable):
         csvWriter.writerow(columnNames)
         
         for path in paths:
-            path = mpp.utils.preprocess_path(path)
+            path = utils.preprocess_path(path)
             
             files = self.loader.iterate_file_data(path)
             if files == None:
-                mpp.utils.report_bad_path(path)
+                utils.report_bad_path(path)
                 exit_code += 1
                 continue
                 
@@ -56,10 +56,10 @@ class Plugin(mpp.api.Plugin, mpp.api.IRunable):
                 matcher = None
                 file_data_prev = self.loader_prev.load_file_data(file_data.get_path())
                 if file_data_prev != None:
-                    matcher = mpp.utils.FileRegionsMatcher(file_data, file_data_prev)
+                    matcher = utils.FileRegionsMatcher(file_data, file_data_prev)
                 for reg in file_data.iterate_regions():
                     per_reg_data = []
-                    per_reg_data.append(mpp.api.Region.T().to_str(reg.get_type()))
+                    per_reg_data.append(api.Region.T().to_str(reg.get_type()))
                     if matcher != None and matcher.is_matched(reg.get_id()):
                         per_reg_data.append(matcher.is_modified(reg.get_id()))
                     else:

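The export.py change above shows the migration pattern that repeats in every plugin below: the flat mpp.* modules move into the metrixpp package, and the database plugin is looked up under its new name 'metrixpp.mpp.dbf'. A hedged sketch of what the same change looks like for a custom plugin (the class body is an invented minimal example, not code from this commit):

# Before the restructuring a plugin imported the flat modules:
#   import mpp.api
#   class Plugin(mpp.api.Plugin, mpp.api.IRunable): ...
# After it, the same API lives inside the metrixpp package:
from metrixpp.mpp import api

class Plugin(api.Plugin, api.IRunable):
    def run(self, args):
        # database loaders are now requested via the fully qualified plugin name
        loader = self.get_plugin('metrixpp.mpp.dbf').get_loader()
        return 0
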
+ 1 - 1
ext/std/tools/info.ini

@@ -10,6 +10,6 @@ version: 1.0
 package: std.tools
 module:  info
 class:   Plugin
-depends: mpp.dbf
+depends: metrixpp.mpp.dbf
 actions: info
 enabled: True

+ 12 - 12
ext/std/tools/info.py

@@ -5,19 +5,19 @@
 #    This file is a part of Metrix++ Tool.
 #    
 
-import mpp.api
-import mpp.cout
-import mpp.utils
+from metrixpp.mpp import api
+from metrixpp.mpp import cout
+from metrixpp.mpp import utils
 
 import os
 
-class Plugin(mpp.api.Plugin, mpp.api.IRunable):
+class Plugin(api.Plugin, api.IRunable):
     
     def run(self, args):
         exit_code = 0
     
-        loader_prev = self.get_plugin('mpp.dbf').get_loader_prev(none_if_empty=True)
-        loader = self.get_plugin('mpp.dbf').get_loader()
+        loader_prev = self.get_plugin('metrixpp.mpp.dbf').get_loader_prev(none_if_empty=True)
+        loader = self.get_plugin('metrixpp.mpp.dbf').get_loader()
     
         details = []
         for each in loader.iterate_properties():
@@ -29,13 +29,13 @@ class Plugin(mpp.api.Plugin, mpp.api.IRunable):
                 elif prev != each.value:
                     prev_value_str = " [modified (was: " + loader_prev.get_property(each.name) + ")]"
             details.append((each.name, each.value + prev_value_str))
-        path = self.get_plugin('mpp.dbf').get_dbfile_path()
+        path = self.get_plugin('metrixpp.mpp.dbf').get_dbfile_path()
         if ('METRIXPLUSPLUS_TEST_MODE' in list(os.environ.keys()) and
              os.environ['METRIXPLUSPLUS_TEST_MODE'] == "True"):
             # in tests, paths come as full paths, strip it for consistent gold files
             # TODO: if there are other path-like arguments, it is better to think about other solution
             path = os.path.basename(path)
-        mpp.cout.notify(path, '', mpp.cout.SEVERITY_INFO, 'Created using plugins and settings:', details)
+        cout.notify(path, '', cout.SEVERITY_INFO, 'Created using plugins and settings:', details)
     
         details = []
         for each in sorted(loader.iterate_namespace_names()):
@@ -49,7 +49,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IRunable):
                     if prev == False:
                         prev_value_str = " [new]"
                 details.append((each + ':' + field,  prev_value_str))
-        mpp.cout.notify(path, '', mpp.cout.SEVERITY_INFO, 'Collected metrics:', details)
+        cout.notify(path, '', cout.SEVERITY_INFO, 'Collected metrics:', details)
     
         paths = None
         if len(args) == 0:
@@ -58,11 +58,11 @@ class Plugin(mpp.api.Plugin, mpp.api.IRunable):
             paths = args
         for path in paths:
             details = []
-            path = mpp.utils.preprocess_path(path)
+            path = utils.preprocess_path(path)
     
             file_iterator = loader.iterate_file_data(path=path)
             if file_iterator == None:
-                mpp.utils.report_bad_path(path)
+                utils.report_bad_path(path)
                 exit_code += 1
                 continue
             for each in file_iterator:
@@ -74,7 +74,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IRunable):
                     elif prev.get_checksum() != each.get_checksum():
                         prev_value_str = " [modified]"
                 details.append((each.get_path(), '{0:#x}'.format(each.get_checksum()) + prev_value_str))
-            mpp.cout.notify(path, '', mpp.cout.SEVERITY_INFO, 'Processed files and checksums:', details)
+            cout.notify(path, '', cout.SEVERITY_INFO, 'Processed files and checksums:', details)
             
         return exit_code
 

ext/std/tools/limit.ini → metrixpp/ext/std/tools/limit.ini


+ 11 - 13
ext/std/tools/limit.py

@@ -1,18 +1,17 @@
 #
 #    Metrix++, Copyright 2009-2019, Metrix++ Project
 #    Link: https://github.com/metrixplusplus/metrixplusplus
-#    
+#
 #    This file is a part of Metrix++ Tool.
-#    
+#
 
 import logging
-import re
 
-import mpp.api
-import mpp.utils
-import mpp.cout
+from metrixpp.mpp import api
+from metrixpp.mpp import utils
+from metrixpp.mpp import cout
 
-class Plugin(mpp.api.Plugin, mpp.api.IRunable):
+class Plugin(api.Plugin, api.IRunable):
 
     def print_warnings(self, args):
         exit_code = 0
@@ -25,10 +24,10 @@ class Plugin(mpp.api.Plugin, mpp.api.IRunable):
             paths = [""]
             paths = [""]
         else:
         else:
             paths = args
             paths = args
-            
+
         for path in paths:
         for path in paths:
-            path = mpp.utils.preprocess_path(path)
-            
+            path = utils.preprocess_path(path)
+
             for limit in limit_backend.iterate_limits():
             for limit in limit_backend.iterate_limits():
                 warns_count = 0
                 warns_count = 0
                 logging.info("Applying limit: " + str(limit))
                 logging.info("Applying limit: " + str(limit))
@@ -50,7 +49,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IRunable):
                                             warning.is_suppressed)
                     exit_code += len(warnings)
 
-                mpp.cout.notify(path, None, mpp.cout.SEVERITY_INFO, "{0} regions exceeded the limit {1}".format(len(warnings), str(limit)))
+                cout.notify(path, None, cout.SEVERITY_INFO, "{0} regions exceeded the limit {1}".format(len(warnings), str(limit)))
 
         return exit_code
 
@@ -71,5 +70,4 @@ def report_limit_exceeded(path, cursor, namespace, field, region_name,
                ("Change trend", '{0:{1}}'.format(trend_value, '+' if trend_value else '')),
                ("Change trend", '{0:{1}}'.format(trend_value, '+' if trend_value else '')),
                ("Limit", stat_limit),
                ("Limit", stat_limit),
                ("Suppressed", is_suppressed)]
                ("Suppressed", is_suppressed)]
-    mpp.cout.notify(path, cursor, mpp.cout.SEVERITY_WARNING, message, details)
-
+    cout.notify(path, cursor, cout.SEVERITY_WARNING, message, details)

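For orientation, report_limit_exceeded() above channels each violation through cout.notify() with SEVERITY_WARNING and a list of (name, value) detail pairs. A rough sketch of such a call, with an invented path, metric and values, and illustrative message wording rather than the exact text produced by the plugin:

from metrixpp.mpp import cout

# Invented example values; only the call shape mirrors report_limit_exceeded().
cout.notify("./src/example.cpp", 42, cout.SEVERITY_WARNING,
            "Metric 'std.code.complexity:cyclomatic' exceeded the limit",
            [("Metric name", "std.code.complexity:cyclomatic"),
             ("Region name", "parse_input"),
             ("Limit", 7.0),
             ("Suppressed", False)])
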
+ 1 - 1
ext/std/tools/limit_backend.ini

@@ -10,6 +10,6 @@ version: 1.0
 package: std.tools
 module:  limit_backend
 class:   Plugin
-depends: mpp.dbf
+depends: metrixpp.mpp.dbf
 actions:
 enabled: True

+ 17 - 17
ext/std/tools/limit_backend.py

@@ -8,11 +8,11 @@
 import logging
 import re
 
-import mpp.api
-import mpp.utils
-import mpp.cout
+from metrixpp.mpp import api
+from metrixpp.mpp import utils
+from metrixpp.mpp import cout
 
-class Plugin(mpp.api.Plugin, mpp.api.IConfigurable):
+class Plugin(api.Plugin, api.IConfigurable):
     
     MODE_NEW     = 0x01
     MODE_TREND   = 0x03
@@ -78,7 +78,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable):
             def __repr__(self):
             def __repr__(self):
                 return "'{0}:{1}' {2} {3} [applied to '{4}' region type(s)]".format(
                 return "'{0}:{1}' {2} {3} [applied to '{4}' region type(s)]".format(
                         self.namespace, self.field, self.filter[1], self.limit,
                         self.namespace, self.field, self.filter[1], self.limit,
-                        mpp.api.Region.T().to_str(self.region_types))
+                        api.Region.T().to_str(self.region_types))
         
         
         self.limits = []
         self.limits = []
         pattern = re.compile(r'''([^:]+)[:]([^:]+)[:]([-+]?[0-9]+(?:[.][0-9]+)?)(?:[:](.+))?''')
         pattern = re.compile(r'''([^:]+)[:]([^:]+)[:]([-+]?[0-9]+(?:[.][0-9]+)?)(?:[:](.+))?''')
@@ -91,13 +91,13 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable):
                 if match.group(4) != None:
                 if match.group(4) != None:
                     for region_type in match.group(4).split(','):
                     for region_type in match.group(4).split(','):
                         region_type = region_type.strip()
                         region_type = region_type.strip()
-                        group_id = mpp.api.Region.T().from_str(region_type)
+                        group_id = api.Region.T().from_str(region_type)
                         if group_id == None:
                         if group_id == None:
                             self.parser.error(
                             self.parser.error(
                                     "option --max-limit: uknown region type (allowed: global, class, struct, namespace, function, interface, any): " + region_type)
                                     "option --max-limit: uknown region type (allowed: global, class, struct, namespace, function, interface, any): " + region_type)
                         region_types |= group_id
                         region_types |= group_id
                 else:
                 else:
-                    region_types = mpp.api.Region.T().ANY
+                    region_types = api.Region.T().ANY
                 limit = Limit("max", float(match.group(3)), match.group(1), match.group(2),
                 limit = Limit("max", float(match.group(3)), match.group(1), match.group(2),
                         (match.group(2), '>', float(match.group(3))), region_types, each)
                         (match.group(2), '>', float(match.group(3))), region_types, each)
                 self.limits.append(limit)
                 self.limits.append(limit)
@@ -110,21 +110,21 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable):
                 if match.group(4) != None:
                 if match.group(4) != None:
                     for region_type in match.group(4).split(','):
                     for region_type in match.group(4).split(','):
                         region_type = region_type.strip()
                         region_type = region_type.strip()
-                        group_id = mpp.api.Region.T().from_str(region_type)
+                        group_id = api.Region.T().from_str(region_type)
                         if group_id == None:
                         if group_id == None:
                             self.parser.error(
                             self.parser.error(
                                     "option --max-limit: uknown region type (allowed: global, class, struct, namespace, function, interface, any): " + region_type)
                                     "option --max-limit: uknown region type (allowed: global, class, struct, namespace, function, interface, any): " + region_type)
                         region_types |= group_id
                         region_types |= group_id
                 else:
                 else:
-                    region_types = mpp.api.Region.T().ANY
+                    region_types = api.Region.T().ANY
                 limit = Limit("min", float(match.group(3)), match.group(1), match.group(2),
                 limit = Limit("min", float(match.group(3)), match.group(1), match.group(2),
                         (match.group(2), '<', float(match.group(3))), region_types, each)
                         (match.group(2), '<', float(match.group(3))), region_types, each)
                 self.limits.append(limit)
                 self.limits.append(limit)
 
 
     def initialize(self):
     def initialize(self):
         super(Plugin, self).initialize()
         super(Plugin, self).initialize()
-        loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
-        loader = self.get_plugin('mpp.dbf').get_loader()
+        loader_prev = self.get_plugin('metrixpp.mpp.dbf').get_loader_prev()
+        loader = self.get_plugin('metrixpp.mpp.dbf').get_loader()
 
 
         self._verify_namespaces(loader.iterate_namespace_names())
         self._verify_namespaces(loader.iterate_namespace_names())
         for each in loader.iterate_namespace_names():
         for each in loader.iterate_namespace_names():
@@ -229,8 +229,8 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable):
                 self.is_modified = is_modified
                 self.is_modified = is_modified
                 self.is_suppressed = is_suppressed
                 self.is_suppressed = is_suppressed
 
 
-        loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
-        loader = self.get_plugin('mpp.dbf').get_loader()
+        loader_prev = self.get_plugin('metrixpp.mpp.dbf').get_loader_prev()
+        loader = self.get_plugin('metrixpp.mpp.dbf').get_loader()
 
 
         warnings = []
         warnings = []
 
 
@@ -255,7 +255,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable):
                                                 sort_by=sort_by,
                                                 sort_by=sort_by,
                                                 limit_by=limit_by)
                                                 limit_by=limit_by)
         if selected_data == None:
         if selected_data == None:
-            mpp.utils.report_bad_path(path)
+            utils.report_bad_path(path)
             return None
             return None
         
         
         for select_data in selected_data:
         for select_data in selected_data:
@@ -271,15 +271,15 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable):
                     diff = 0
                     diff = 0
                     is_modified = False
                     is_modified = False
                 else:
                 else:
-                    matcher = mpp.utils.FileRegionsMatcher(file_data, file_data_prev)
+                    matcher = utils.FileRegionsMatcher(file_data, file_data_prev)
                     prev_id = matcher.get_prev_id(select_data.get_region().get_id())
                     prev_id = matcher.get_prev_id(select_data.get_region().get_id())
                     if matcher.is_matched(select_data.get_region().get_id()):
                     if matcher.is_matched(select_data.get_region().get_id()):
                         if matcher.is_modified(select_data.get_region().get_id()):
                         if matcher.is_modified(select_data.get_region().get_id()):
                             is_modified = True
                             is_modified = True
                         else:
                         else:
                             is_modified = False
                             is_modified = False
-                        diff = mpp.api.DiffData(select_data,
-                                                        file_data_prev.get_region(prev_id)).get_data(limit.namespace, limit.field)
+                        diff = api.DiffData(select_data,
+                                            file_data_prev.get_region(prev_id)).get_data(limit.namespace, limit.field)
 
 
             if (self.is_mode_matched(limit.limit,
             if (self.is_mode_matched(limit.limit,
                                             select_data.get_data(limit.namespace, limit.field),
                                             select_data.get_data(limit.namespace, limit.field),

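The limit specifications consumed by limit_backend.py follow the pattern namespace:field:limit[:region types], parsed by the regular expression shown above. A quick standalone check (the metric and region types in the example are illustrative):

import re

# pattern copied from limit_backend.py above
pattern = re.compile(r'''([^:]+)[:]([^:]+)[:]([-+]?[0-9]+(?:[.][0-9]+)?)(?:[:](.+))?''')

m = pattern.match("std.code.complexity:cyclomatic:7:function,interface")
print(m.groups())   # ('std.code.complexity', 'cyclomatic', '7', 'function,interface')
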
+ 1 - 1
ext/std/tools/report.ini

@@ -10,6 +10,6 @@ version: 1.0
 package: std.tools
 module:  report
 class:   Plugin
-depends: mpp.dbf,std.tools.limit_backend
+depends: metrixpp.mpp.dbf,std.tools.limit_backend
 actions: report
 enabled: True

+ 24 - 26
ext/std/tools/report.py

@@ -1,22 +1,20 @@
 #
 #    Metrix++, Copyright 2009-2019, Metrix++ Project
 #    Link: https://github.com/metrixplusplus/metrixplusplus
-#    
+#
 #    This file is a part of Metrix++ Tool.
-#    
+#
 
 import logging
-import re
 import os
 import pytablewriter
 
-import mpp.api
-import mpp.utils
-import mpp.cout
+from metrixpp.mpp import api
+from metrixpp.mpp import utils
 
 DIGIT_COUNT = 8
 
-class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
+class Plugin(api.Plugin, api.IConfigurable, api.IRunable):
 
     def declare_configuration(self, parser):
         self.parser = parser
@@ -50,7 +48,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
         return subdirs, subfiles
         return subdirs, subfiles
 
 
     def create_doxygen_report(self, paths, output_dir, overview_data, data, loader, loader_prev):
     def create_doxygen_report(self, paths, output_dir, overview_data, data, loader, loader_prev):
-        
+
         exit_code = 1
         exit_code = 1
 
 
         if output_dir:
         if output_dir:
@@ -83,7 +81,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
                 for path in paths:
                 for path in paths:
 
 
                     file.write("\\file {}\n\n".format(path))
                     file.write("\\file {}\n\n".format(path))
-                    
+
                     writer = pytablewriter.MarkdownTableWriter()
                     writer = pytablewriter.MarkdownTableWriter()
                     writer.table_name = "metrix"
                     writer.table_name = "metrix"
                     writer.headers = data[path]["file_fields"]
                     writer.headers = data[path]["file_fields"]
@@ -111,14 +109,14 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
                     # add warnings as list items
                     # add warnings as list items
                     for warning in data[path]["warnings"]:
                     for warning in data[path]["warnings"]:
                         warning_text = "Metric '" + warning.namespace + ":" + warning.field + "'"
                         warning_text = "Metric '" + warning.namespace + ":" + warning.field + "'"
-                        
+
                         if warning.region_name and warning.region_name != "__global__":
                         if warning.region_name and warning.region_name != "__global__":
                             warning_text = warning_text + " for region \\ref " + warning.region_name
                             warning_text = warning_text + " for region \\ref " + warning.region_name
                         elif warning.region_name == "__global__":
                         elif warning.region_name == "__global__":
                             warning_text = warning_text + " for region " + warning.region_name
                             warning_text = warning_text + " for region " + warning.region_name
                         else:
                         else:
                             warning_text = warning_text + " for the file \\ref " + warning.path
                             warning_text = warning_text + " for the file \\ref " + warning.path
-                        
+
                         warning_text = warning_text + " exceeds the limit."
                         warning_text = warning_text + " exceeds the limit."
 
 
                         if warning.type == "max":
                         if warning.type == "max":
@@ -128,10 +126,10 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
                         warning_text = warning_text + " (value: {} {} limit: {})".format(warning.stat_level,
                         warning_text = warning_text + " (value: {} {} limit: {})".format(warning.stat_level,
                                                                                          warning_comp,
                                                                                          warning_comp,
                                                                                          warning.stat_limit)
                                                                                          warning.stat_limit)
-                        
+
                         file.write("\\xrefitem metrix_warnings \"Metrix Warning\" \"Metrix Warnings\" {}\n".format(warning_text))
                         file.write("\\xrefitem metrix_warnings \"Metrix Warning\" \"Metrix Warnings\" {}\n".format(warning_text))
-                        
-                    
+
+
                     file.write("\n\n")
                     file.write("\n\n")
 
 
                 file.write("*/\n")
                 file.write("*/\n")
@@ -148,8 +146,8 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
         overview_data = {}
         overview_data = {}
         warnings = []
         warnings = []
 
 
-        loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
-        loader = self.get_plugin('mpp.dbf').get_loader()
+        loader_prev = self.get_plugin('metrixpp.mpp.dbf').get_loader_prev()
+        loader = self.get_plugin('metrixpp.mpp.dbf').get_loader()
         limit_backend = self.get_plugin('std.tools.limit_backend')
         limit_backend = self.get_plugin('std.tools.limit_backend')
 
 
         paths = None
         paths = None
@@ -159,7 +157,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
             paths = args
             paths = args
 
 
         for path in paths:
         for path in paths:
-            path = mpp.utils.preprocess_path(path)
+            path = utils.preprocess_path(path)
             data[path] = {}
             data[path] = {}
             data[path]["file_data"] = {}
             data[path]["file_data"] = {}
             data[path]["file_fields"] = ["warnings"]
             data[path]["file_fields"] = ["warnings"]
@@ -170,7 +168,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
             data[path]["warnings"] = []
             data[path]["warnings"] = []
 
 
             file_data = loader.load_file_data(path)
             file_data = loader.load_file_data(path)
-            
+
             # get warnings from limit plugin
             # get warnings from limit plugin
             data[path]["warnings"] = limit_backend.get_all_warnings(path)
             data[path]["warnings"] = limit_backend.get_all_warnings(path)
             # convert paths to increase readability
             # convert paths to increase readability
@@ -183,7 +181,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
                 for field in file_data.iterate_fields(namespace):
                 for field in file_data.iterate_fields(namespace):
                     data[path]["file_data"][namespace + "." +  field[0]] = field[1]
                     data[path]["file_data"][namespace + "." +  field[0]] = field[1]
                     data[path]["file_fields"].append(namespace + "." +  field[0])
                     data[path]["file_fields"].append(namespace + "." +  field[0])
-   
+
             for field in data[path]["file_fields"]:
             for field in data[path]["file_fields"]:
                 if field == "warnings":
                 if field == "warnings":
                     data[path]["file_matrix"][0].append(len(data[path]["warnings"]))
                     data[path]["file_matrix"][0].append(len(data[path]["warnings"]))
@@ -201,20 +199,20 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
 
 
                         if not (namespace + "." +  field[0]) in data[path]["region_fields"]:
                         if not (namespace + "." +  field[0]) in data[path]["region_fields"]:
                             data[path]["region_fields"].append(namespace + "." +  field[0])
                             data[path]["region_fields"].append(namespace + "." +  field[0])
-            
+
             # iterate over all found regions in the file
             # iterate over all found regions in the file
             for region in data[path]["regions"]:
             for region in data[path]["regions"]:
                 # add static columns with region name and warning count
                 # add static columns with region name and warning count
                 warning_count = sum(warning.region_name == region for warning in data[path]["warnings"])
                 warning_count = sum(warning.region_name == region for warning in data[path]["warnings"])
                 region_row = [region, str(warning_count)]
                 region_row = [region, str(warning_count)]
-                
+
                 # start iterating after the static fields
                 # start iterating after the static fields
                 for field in data[path]["region_fields"][2:]:
                 for field in data[path]["region_fields"][2:]:
                     if field in data[path]["regions"][region]:
                     if field in data[path]["regions"][region]:
                         region_row.append(data[path]["regions"][region][field])
                         region_row.append(data[path]["regions"][region][field])
                     else:
                     else:
                         region_row.append("-")
                         region_row.append("-")
-                
+
                 data[path]["region_matrix"].append(region_row)
                 data[path]["region_matrix"].append(region_row)
 
 
             # assemble overview table
             # assemble overview table
@@ -225,7 +223,7 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
                 for field in value["file_fields"]:
                 for field in value["file_fields"]:
                     if not field in overview_data["fields"]:
                     if not field in overview_data["fields"]:
                         overview_data["fields"].append(field)
                         overview_data["fields"].append(field)
-            
+
             for key, value in data.items():
             for key, value in data.items():
                 overview_data["warnings"] = overview_data["warnings"] + value["warnings"]
                 overview_data["warnings"] = overview_data["warnings"] + value["warnings"]
                 row = [os.path.relpath(key), len(value["warnings"])]
                 row = [os.path.relpath(key), len(value["warnings"])]
@@ -234,10 +232,10 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
                         row.append(value["file_data"][field])
                         row.append(value["file_data"][field])
                     else:
                     else:
                         row.append("-")
                         row.append("-")
-                
+
                 overview_data["matrix"].append(row)
                 overview_data["matrix"].append(row)
-        
-        
+
+
         if self.out_format == "doxygen":
         if self.out_format == "doxygen":
             exit_code = self.create_doxygen_report(paths,
             exit_code = self.create_doxygen_report(paths,
                                                    self.out_dir,
                                                    self.out_dir,

+ 1 - 1
ext/std/tools/view.ini

@@ -10,6 +10,6 @@ version: 1.0
 package: std.tools
 module:  view
 class:   Plugin
-depends: mpp.dbf
+depends: metrixpp.mpp.dbf
 actions: view
 enabled: True

+ 70 - 27
ext/std/tools/view.py

@@ -8,13 +8,14 @@
 import logging
 import sys
 
-import mpp.api
-import mpp.utils
-import mpp.cout
+from metrixpp.mpp import api
+from metrixpp.mpp import utils
+from metrixpp.mpp import cout
+from metrixpp.mpp import promout
 
 DIGIT_COUNT = 8
 
-class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
+class Plugin(api.Plugin, api.IConfigurable, api.IRunable):
     
     MODE_NEW     = 0x01
     MODE_TOUCHED = 0x03
@@ -22,9 +23,9 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
 
 
     def declare_configuration(self, parser):
         self.parser = parser
-        parser.add_option("--format", "--ft", default='txt', choices=['txt', 'xml', 'python'],
+        parser.add_option("--format", "--ft", default='txt', choices=['txt', 'xml', 'python', 'prometheus'],
                           help="Format of the output data. "
-                          "Possible values are 'xml', 'txt' or 'python' [default: %default]")
+                          "Possible values are 'xml', 'txt', 'python' or 'prometheus' [default: %default]")
         parser.add_option("--nest-regions", "--nr", action="store_true", default=False,
                           help="If the option is set (True), data for regions is exported in the form of a tree. "
                           "Otherwise, all regions are exported in plain list. [default: %default]")
@@ -55,8 +56,8 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable, mpp.api.IRunable):
             self.parser.error("option --scope-mode: The mode '" + options.__dict__['scope_mode'] + "' requires '--db-file-prev' option set")
             self.parser.error("option --scope-mode: The mode '" + options.__dict__['scope_mode'] + "' requires '--db-file-prev' option set")
 
 
     def run(self, args):
     def run(self, args):
-        loader_prev = self.get_plugin('mpp.dbf').get_loader_prev()
-        loader = self.get_plugin('mpp.dbf').get_loader()
+        loader_prev = self.get_plugin('metrixpp.mpp.dbf').get_loader_prev()
+        loader = self.get_plugin('metrixpp.mpp.dbf').get_loader()
     
     
         paths = None
         paths = None
         if len(args) == 0:
         if len(args) == 0:
@@ -83,7 +84,7 @@ def export_to_str(out_format, paths, loader, loader_prev, nest_regions, dist_col
         result += "{'view': ["
         result += "{'view': ["
 
 
     for (ind, path) in enumerate(paths):
     for (ind, path) in enumerate(paths):
-        path = mpp.utils.preprocess_path(path)
+        path = utils.preprocess_path(path)
         
         
         aggregated_data, aggregated_data_prev = load_aggregated_data_with_mode(loader, loader_prev, path , mode)
         aggregated_data, aggregated_data_prev = load_aggregated_data_with_mode(loader, loader_prev, path , mode)
         
         
@@ -95,7 +96,7 @@ def export_to_str(out_format, paths, loader, loader_prev, nest_regions, dist_col
             subdirs = sorted(aggregated_data.get_subdirs())
             subdirs = sorted(aggregated_data.get_subdirs())
             subfiles = sorted(aggregated_data.get_subfiles())
             subfiles = sorted(aggregated_data.get_subfiles())
         else:
         else:
-            mpp.utils.report_bad_path(path)
+            utils.report_bad_path(path)
             exit_code += 1
             exit_code += 1
         aggregated_data_tree = append_suppressions(path, aggregated_data_tree, loader, mode)
         aggregated_data_tree = append_suppressions(path, aggregated_data_tree, loader, mode)
 
 
@@ -122,13 +123,15 @@ def export_to_str(out_format, paths, loader, loader_prev, nest_regions, dist_col
 
 
         if out_format == 'txt':
         if out_format == 'txt':
             cout_txt(data, loader)
             cout_txt(data, loader)
+        elif out_format == 'prometheus':
+            cout_prom(data, loader)
         elif out_format == 'xml':
         elif out_format == 'xml':
-            result += mpp.utils.serialize_to_xml(data, root_name = "data", digitCount = DIGIT_COUNT) + "\n"
+            result += utils.serialize_to_xml(data, root_name = "data", digitCount = DIGIT_COUNT) + "\n"
         elif out_format == 'python':
         elif out_format == 'python':
             postfix = ""
             postfix = ""
             if ind < len(paths) - 1:
             if ind < len(paths) - 1:
                 postfix = ", "
                 postfix = ", "
-            result += mpp.utils.serialize_to_python(data, root_name = "data") + postfix
+            result += utils.serialize_to_python(data, root_name = "data") + postfix
 
 
     if out_format == 'xml':
     if out_format == 'xml':
         result += "</view>"
         result += "</view>"
@@ -144,7 +147,7 @@ def load_aggregated_data_with_mode(loader, loader_prev, path, mode):
     else:
     else:
         assert(mode == Plugin.MODE_NEW or mode == Plugin.MODE_TOUCHED)
         assert(mode == Plugin.MODE_NEW or mode == Plugin.MODE_TOUCHED)
         
         
-        class AggregatedFilteredData(mpp.api.AggregatedData):
+        class AggregatedFilteredData(api.AggregatedData):
             
             
             def __init__(self, loader, path):
             def __init__(self, loader, path):
                 super(AggregatedFilteredData, self).__init__(loader, path)
                 super(AggregatedFilteredData, self).__init__(loader, path)
@@ -245,7 +248,7 @@ def load_aggregated_data_with_mode(loader, loader_prev, path, mode):
                         result._append_data(file_data)
                         result._append_data(file_data)
                         result_prev._append_data(file_data_prev)
                         result_prev._append_data(file_data_prev)
                     # process regions separately
                     # process regions separately
-                    matcher = mpp.utils.FileRegionsMatcher(file_data, file_data_prev)
+                    matcher = utils.FileRegionsMatcher(file_data, file_data_prev)
                     prev_reg_ids = set()
                     prev_reg_ids = set()
                     for region in file_data.iterate_regions():
                     for region in file_data.iterate_regions():
                         prev_id = matcher.get_prev_id(region.get_id())
                         prev_id = matcher.get_prev_id(region.get_id())
@@ -290,7 +293,7 @@ def append_regions(file_data_tree, file_data, file_data_prev, nest_regions):
     if file_data_prev != None:
     if file_data_prev != None:
         file_data_tree = append_diff(file_data_tree,
         file_data_tree = append_diff(file_data_tree,
                                      file_data_prev.get_data_tree())
                                      file_data_prev.get_data_tree())
-        regions_matcher = mpp.utils.FileRegionsMatcher(file_data, file_data_prev)
+        regions_matcher = utils.FileRegionsMatcher(file_data, file_data_prev)
     
     
     if nest_regions == False:
     if nest_regions == False:
         regions = []
         regions = []
@@ -303,7 +306,7 @@ def append_regions(file_data_tree, file_data, file_data_prev, nest_regions):
                                                region_data_prev.get_data_tree())
                                                region_data_prev.get_data_tree())
                 is_modified = regions_matcher.is_modified(region.get_id())
                 is_modified = regions_matcher.is_modified(region.get_id())
             regions.append({"info": {"name" : region.name,
             regions.append({"info": {"name" : region.name,
-                                     'type': mpp.api.Region.T().to_str(region.get_type()),
+                                     'type': api.Region.T().to_str(region.get_type()),
                                      'modified': is_modified,
                                      'cursor' : region.cursor,
                                      'line_begin': region.line_begin,
@@ -323,7 +326,7 @@ def append_regions(file_data_tree, file_data, file_data_prev, nest_regions):
                                                region_data_prev.get_data_tree())
                 is_modified = regions_matcher.is_modified(region.get_id())
             result = {"info": {"name" : region.name,
-                               'type' : mpp.api.Region.T().to_str(region.get_type()),
+                               'type' : api.Region.T().to_str(region.get_type()),
                                'modified': is_modified,
                                'cursor' : region.cursor,
                                'line_begin': region.line_begin,
@@ -541,11 +544,11 @@ def cout_txt_regions(path, regions, indent = 0):
                 if field == '__diff__':
                     continue
                 if field in list(diff_data.keys()):
-                    diff_str = " [" + ("+" if diff_data[field] >= 0 else "") + str(diff_data[field]) + "]"
+                    diff_str = " [" + ("+" if diff_data[field] >= 0 else "") + "{0:.3g}".format(diff_data[field]) + "]"
                 details.append((namespace + ":" + field, str(region['data'][namespace][field]) + diff_str))
-        mpp.cout.notify(path,
+        cout.notify(path,
                        region['info']['cursor'],
-                        mpp.cout.SEVERITY_INFO,
+                        cout.SEVERITY_INFO,
                         "Metrics per '" + region['info']['name']+ "' region",
                         "Metrics per '" + region['info']['name']+ "' region",
                         details,
                         details,
                         indent=indent)
                         indent=indent)
@@ -571,9 +574,9 @@ def cout_txt(data, loader):
                     diff_str = " [" + ("+" if diff_data[field] >= 0 else "") + str(diff_data[field]) + "]"
                     diff_str = " [" + ("+" if diff_data[field] >= 0 else "") + str(diff_data[field]) + "]"
                 details.append((namespace + ":" + field, str(data['file-data'][namespace][field]) + diff_str))
                 details.append((namespace + ":" + field, str(data['file-data'][namespace][field]) + diff_str))
     if len(details) > 0:
     if len(details) > 0:
-        mpp.cout.notify(data['info']['path'],
+        cout.notify(data['info']['path'],
                    0,
-                    mpp.cout.SEVERITY_INFO,
+                    cout.SEVERITY_INFO,
                     "Metrics per file",
                     "Metrics per file",
                     details)
                     details)
 
 
@@ -634,15 +637,15 @@ def cout_txt(data, loader):
                 else:
                     metric_str = str(bar['metric'])
                 
                 
-                metric_str = (" " * (mpp.cout.DETAILS_OFFSET - len(metric_str) - 1)) + metric_str
+                metric_str = (" " * (cout.DETAILS_OFFSET - len(metric_str) - 1)) + metric_str
                 count_str = str(bar['count'])
                 count_str = ((" " * (count_str_len - len(count_str))) + count_str + diff_str + "\t")
                 details.append((metric_str,
                                 "{0:.3f}".format(bar['ratio']) + " : " + "{0:.3f}".format(sum_ratio) +  " : " +
                                 count_str + ('|' * int(bar['ratio']*100))))
-            mpp.cout.notify(data['info']['path'],
+            cout.notify(data['info']['path'],
                    '', # no line number
-                    mpp.cout.SEVERITY_INFO,
+                    cout.SEVERITY_INFO,
                     "Overall metrics for '" + namespace + ":" + field + "' metric",
                     "Overall metrics for '" + namespace + ":" + field + "' metric",
                     details)
                     details)
     details = []
     details = []
@@ -651,9 +654,49 @@ def cout_txt(data, loader):
     for each in sorted(data['subfiles']):
         details.append(('File', each))
     if len(details) > 0: 
-        mpp.cout.notify(data['info']['path'],
+        cout.notify(data['info']['path'],
                '', # no line number
-                mpp.cout.SEVERITY_INFO,
+                cout.SEVERITY_INFO,
                 "Directory content:",
                 "Directory content:",
                 details)
                 details)
     
     
+def cout_prom_regions(path, regions, indent = 0):
+    for region in regions:
+        details = []
+        for namespace in sorted(list(region['data'].keys())):
+            diff_data = {}
+            if '__diff__' in list(region['data'][namespace].keys()):
+                diff_data = region['data'][namespace]['__diff__']
+            for field in sorted(list(region['data'][namespace].keys())):
+                diff_str = ""
+                if field == '__diff__':
+                    continue
+                if field in list(diff_data.keys()):
+                    diff_str = " [" + ("+" if diff_data[field] >= 0 else "") + str(diff_data[field]) + "]"
+                details.append((namespace + ":" + field, str(region['data'][namespace][field]) + diff_str))
+        promout.notify(path = path,
+                        region = region['info']['name'],
+                        metric = "",
+                        details = details)
+        if 'subregions' in list(region.keys()):
+            cout_prom_regions(path, region['subregions'], indent=indent+1)
+            
+def cout_prom(data, loader):
+    
+    for key in list(data['file-data'].keys()):
+        if key == 'regions':
+            cout_prom_regions(data['info']['path'], data['file-data'][key])
+
+    for namespace in sorted(list(data['aggregated-data'].keys())):
+        for field in sorted(list(data['aggregated-data'][namespace].keys())):
+            details = []
+            for attr in ['avg', 'min', 'max', 'total']:
+                if isinstance(data['aggregated-data'][namespace][field][attr], float):
+                    # round the data to reach same results on platforms with different precision
+                    details.append((attr, str(round(data['aggregated-data'][namespace][field][attr], DIGIT_COUNT))))
+                else:
+                    details.append((attr, str(data['aggregated-data'][namespace][field][attr])))
+
+            promout.notify(path = data['info']['path'],
+                    metric = namespace + "." + field,
+                    details = details)
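
The promout module used by the new cout_prom()/cout_prom_regions() writers above is not included in this truncated diff, so its interface can only be inferred from the call sites (keyword arguments path, region, metric and details). A minimal sketch of what such a notify() helper could look like, assuming Prometheus text-exposition output and hypothetical names throughout:

    # Hypothetical sketch only: the real metrixpp promout module is not shown in this diff.
    import re

    def _sanitize(name):
        # Prometheus metric names may only contain [a-zA-Z0-9_:].
        return re.sub(r'[^a-zA-Z0-9_:]', '_', name)

    def notify(path, metric, details, region=None):
        # 'details' is a list of (attribute, value) pairs, as built by cout_prom() above.
        for attr, value in details:
            labels = 'path="{0}"'.format(path)
            if region is not None:
                labels += ',region="{0}"'.format(region)
            name = _sanitize("metrixpp_" + (metric + "_" + attr if metric else attr))
            print("{0}{{{1}}} {2}".format(name, labels, value))

With this shape, cout_prom() would emit one line per aggregated attribute, for example metrixpp_std_code_lines_total_avg{path="./src"} 27.5 (values made up).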

+ 7 - 18
metrixpp.py

@@ -9,42 +9,31 @@ import time
 import sys
 import logging
 import os
-import subprocess
 import itertools
 
 
-import mpp.log
-import mpp.internal.loader
+from .mpp import log
+from .mpp.internal import loader as plugin_loader
 
 
 def main():
     
     os.environ['METRIXPLUSPLUS_INSTALL_DIR'] = os.path.dirname(os.path.abspath(__file__))
-    
-    exemode = None
-    if len(sys.argv[1:]) != 0:
-        exemode = sys.argv[1]
-    if exemode != "-R" and exemode != "-D":
-        exemode = '-D' # TODO implement install and release mode
-        # inject '-D' or '-R' option
-        #profile_args = ['-m', 'cProfile']
-        profile_args = []
-        exit(subprocess.call(itertools.chain([sys.executable], profile_args, [sys.argv[0], '-D'], sys.argv[1:])))
 
 
     command = ""
     command = ""
-    if len(sys.argv[1:]) > 1:
-        command = sys.argv[2]
+    if len(sys.argv) > 1:
+        command = sys.argv[1]
 
 
-    loader = mpp.internal.loader.Loader()
+    loader = plugin_loader.Loader()
     mpp_paths = []
     if 'METRIXPLUSPLUS_PATH' in list(os.environ.keys()):
         mpp_paths = os.environ['METRIXPLUSPLUS_PATH'].split(os.pathsep)
-    args = loader.load(command, mpp_paths, sys.argv[3:])
+    args = loader.load(command, mpp_paths, sys.argv[2:])
     exit_code = loader.run(args)
     loader.unload()
     return exit_code
     
 def start():
     ts = time.time()
-    mpp.log.set_default_format()
+    log.set_default_format()
 
 
     exit_code = main()
     time_spent = round((time.time() - ts), 2)
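
The bootstrap rewrite above drops the old self-re-exec through subprocess with an injected '-D' option: the first positional argument is now taken directly as the command, and everything from the second argument onwards is passed to the plugin loader. A small illustration of the new split (command and option names are only examples):

    # Illustrates the argument handling shown above; argv values are made up.
    import sys

    sys.argv = ['metrix++.py', 'collect', '--std.code.lines.total', '--', './src']

    command = ""
    if len(sys.argv) > 1:
        command = sys.argv[1]      # 'collect'
    plugin_args = sys.argv[2:]     # ['--std.code.lines.total', '--', './src']
    print(command, plugin_args)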

mpp/__init__.py → metrixpp/mpp/__init__.py


+ 45 - 17
mpp/api.py

@@ -8,8 +8,8 @@
 import os.path
 import sys
 
 
-import mpp.internal.dbwrap
-import mpp.internal.api_impl
+from metrixpp.mpp.internal import dbwrap
+from metrixpp.mpp.internal import api_impl
 
 
 class InterfaceNotImplemented(Exception):
     def __init__(self, obj):
@@ -151,7 +151,7 @@ class LoadableData(Data):
         for column_name in list(row.keys()):
             try:
                 packager = namespace_obj._get_field_packager(column_name)
-            except mpp.internal.api_impl.PackagerError:
+            except api_impl.PackagerError:
                 continue
             if row[column_name] == None:
                 continue
@@ -686,7 +686,7 @@ class Namespace(object):
         else:
             for column in self.db.iterate_columns(name):
                 self.add_field(column.name,
-                               mpp.internal.api_impl.PackagerFactory().get_python_type(column.sql_type),
+                               api_impl.PackagerFactory().get_python_type(column.sql_type),
                                non_zero=column.non_zero)
         
     def get_name(self):
@@ -698,7 +698,7 @@ class Namespace(object):
     def add_field(self, field_name, python_type, non_zero=False):
         if not isinstance(field_name, str):
             raise Namespace.FieldError(field_name, "field_name not a string")
-        packager = mpp.internal.api_impl.PackagerFactory().create(python_type, non_zero)
+        packager = api_impl.PackagerFactory().create(python_type, non_zero)
         if field_name in list(self.fields.keys()):
             raise Namespace.FieldError(field_name, "double used")
         self.fields[field_name] = packager
@@ -716,34 +716,34 @@ class Namespace(object):
     def check_field(self, field_name):
         try:
             self._get_field_packager(field_name)
-        except mpp.internal.api_impl.PackagerError:
+        except api_impl.PackagerError:
             return False
         return True
 
     def get_field_sql_type(self, field_name):
         try:
             return self._get_field_packager(field_name).get_sql_type()
-        except mpp.internal.api_impl.PackagerError:
+        except api_impl.PackagerError:
             raise Namespace.FieldError(field_name, 'does not exist')
 
     def get_field_python_type(self, field_name):
         try:
             return self._get_field_packager(field_name).get_python_type()
-        except mpp.internal.api_impl.PackagerError:
+        except api_impl.PackagerError:
             raise Namespace.FieldError(field_name, 'does not exist')
 
 
     def is_field_non_zero(self, field_name):
         try:
             return self._get_field_packager(field_name).is_non_zero()
-        except mpp.internal.api_impl.PackagerError:
+        except api_impl.PackagerError:
             raise Namespace.FieldError(field_name, 'does not exist')
 
     def _get_field_packager(self, field_name):
         if field_name in list(self.fields.keys()):
             return self.fields[field_name]
         else:
-            raise mpp.internal.api_impl.PackagerError("unknown field " + field_name + " requested")
+            raise api_impl.PackagerError("unknown field " + field_name + " requested")
     
     
 class Loader(object):
     
     
@@ -753,7 +753,7 @@ class Loader(object):
         self.last_file_data = None # for performance boost reasons
     
     def create_database(self, dbfile, previous_db = None):
-        self.db = mpp.internal.dbwrap.Database()
+        self.db = dbwrap.Database()
         try:
             self.db.create(dbfile, clone_from=previous_db)
         except:
@@ -761,7 +761,7 @@ class Loader(object):
         return True
         
     def open_database(self, dbfile, read_only = True):
-        self.db = mpp.internal.dbwrap.Database()
+        self.db = dbwrap.Database()
         if os.path.exists(dbfile) == False:
             return False
         try:
@@ -861,7 +861,7 @@ class Loader(object):
                     
                     
                     try:
                         packager = space._get_field_packager(each[0])
-                    except mpp.internal.api_impl.PackagerError:
+                    except api_impl.PackagerError:
                         raise Loader.DataNotPackable(namespace, each[0], each[1], None, "The field has not been found")
         
                     if space.support_regions != support_regions:
@@ -871,7 +871,7 @@ class Loader(object):
                         packed_data = packager.pack(each[1])
                         if packed_data == None:
                             continue
-                    except mpp.internal.api_impl.PackagerError:
+                    except api_impl.PackagerError:
                         raise Loader.DataNotPackable(namespace, each[0], each[1], packager, "Packager raised exception")
                     
                     yield (each[0], packed_data)
@@ -1065,7 +1065,7 @@ class Plugin(BasePlugin):
         if hasattr(self, 'is_updated') == False:
             self.is_updated = False # original initialization
 
 
-        db_loader = self.get_plugin('mpp.dbf').get_loader()
+        db_loader = self.get_plugin('metrixpp.mpp.dbf').get_loader()
 
 
         if namespace == None:
             namespace = self.get_name()
@@ -1154,12 +1154,40 @@ class MetricPluginMixin(Parent):
         
         
         def get_result(self):
             sourced_metric = self.region.get_data(self.rank_source[0], self.rank_source[1])
+            # necessary with python3
+            if sourced_metric == None:
+                assert(self.region.get_type() != Region.T.FUNCTION)
+                assert(self.rank_source == ('std.code.complexity', 'cyclomatic'))
+                return None
             for (ind, range_pair) in enumerate(self.rank_ranges):
                 if ((range_pair[0] == None or sourced_metric >= range_pair[0])
                     and
                     (range_pair[1] == None or sourced_metric <= range_pair[1])):
-                        self.result = self.result * (ind + 1)
-                        break
+                    self.result = self.result * (ind + 1)
+                    break
+            return self.result
+
+    class RatioCalculator(PlainCounter):
+        
+        def __init__(self, *args, **kwargs):
+            super(MetricPluginMixin.RatioCalculator, self).__init__(*args, **kwargs)
+            self.result = self.region.get_data(self.namespace, self.field)
+            if self.result == None:
+                self.result = 0.0
+        
+        def get_result(self):
+            sourced_comments = self.region.get_data(self.ratio_comments[0], self.ratio_comments[1])
+            sourced_code = self.region.get_data(self.ratio_code[0], self.ratio_code[1])
+            if ((sourced_comments != None)
+                and
+                (sourced_code != None)
+                and
+                (sourced_comments + sourced_code != 0)
+               ): # 3-digit precision
+                self.result = float((1000 * sourced_comments) // (sourced_code + sourced_comments)) / 1000.
+            else:
+                self.result = 0.0
+
             return self.result
 
     def declare_metric(self, is_active, field,
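
The RatioCalculator added above truncates the comment-to-code ratio to three decimals with integer arithmetic (floor of 1000 * x, then division by 1000) so the stored value does not depend on platform-specific float rounding, as the '# 3-digit precision' comment notes. A worked example of that expression with made-up counts:

    # Worked example of the 3-digit truncation used by RatioCalculator above.
    sourced_comments = 37    # comment lines in a region (made-up value)
    sourced_code = 105       # code lines in the same region (made-up value)

    ratio = float((1000 * sourced_comments) // (sourced_code + sourced_comments)) / 1000.
    print(ratio)             # 0.26 -- 37/142 = 0.2605..., truncated (not rounded) to 3 digits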

mpp/cout.py → metrixpp/mpp/cout.py


+ 1 - 1
mpp/dbf.ini

@@ -7,7 +7,7 @@
 
 
 [Plugin]
 version: 1.0
-package: mpp
+package: metrixpp.mpp
 module:  dbf
 class:   Plugin
 depends: None

+ 5 - 5
mpp/dbf.py

@@ -5,12 +5,12 @@
 #    This file is a part of Metrix++ Tool.
 #    
 
 
-import mpp.api
+from metrixpp.mpp import api
 
 
 import os.path
 import logging
 
 
-class Plugin(mpp.api.Plugin, mpp.api.IConfigurable):
+class Plugin(api.Plugin, api.IConfigurable):
     
     
     def declare_configuration(self, parser):
         if self.get_action() == 'collect':
@@ -46,16 +46,16 @@ class Plugin(mpp.api.Plugin, mpp.api.IConfigurable):
                 except:
                     logging.warn("Failure in removing file: " + self.dbfile)
     
     
-            self.loader = mpp.api.Loader()
+            self.loader = api.Loader()
             created = self.loader.create_database(self.dbfile, previous_db = self.dbfile_prev)
             if created == False:
                 self.parser.error("option --db-file: Can not create file '{0}'".format(self.dbfile))
             
         else:
-            self.loader = mpp.api.Loader()
+            self.loader = api.Loader()
             if self.loader.open_database(self.dbfile) == False:
                 self.parser.error("option --db-file: Can not open file '{0}'".format(self.dbfile))
-            self.loader_prev = mpp.api.Loader()
+            self.loader_prev = api.Loader()
             if self.dbfile_prev != None:
                 if self.loader_prev.open_database(self.dbfile_prev) == False:
                     self.parser.error("option --db-file-prev: Can not open file '{0}'".format(self.dbfile_prev))

mpp/internal/__init__.py → metrixpp/mpp/internal/__init__.py


+ 3 - 2
mpp/internal/api_impl.py

@@ -111,9 +111,10 @@ class PackagerFactory(object):
 
 
     class StringPackager(IPackager):
         def pack(self, unpacked_data):
-            if not isinstance(unpacked_data, str):
+            try:
+                return str(unpacked_data)
+            except ValueError:
                 raise PackagerError()
-            return str(unpacked_data)
             
             
         def unpack(self, packed_data): 
             try:
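
The pack() change above relaxes the string packager to coerce any value with str() instead of rejecting non-string input, which matters under Python 3 where field values may arrive as int or float. A small illustration of the new behaviour (standalone sketch, not the module itself):

    # Standalone illustration of the relaxed packing behaviour shown above.
    class PackagerError(Exception):
        pass

    def pack(unpacked_data):
        try:
            return str(unpacked_data)
        except ValueError:
            raise PackagerError()

    print(pack("todo"))   # 'todo'
    print(pack(42))       # '42'   -- previously raised PackagerError
    print(pack(0.75))     # '0.75'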

mpp/internal/dbwrap.py → metrixpp/mpp/internal/dbwrap.py


+ 0 - 0
mpp/internal/loader.py


Some files were not shown because too many files changed in this diff