diff --git a/.gitignore b/.gitignore index c5e96829..162c4156 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,9 @@ *.py[co] .*.sw? +.DS_Store /reclass-config.yml /reclass.egg-info /build /dist /.coverage +.kitchen diff --git a/.kitchen-verify.sh b/.kitchen-verify.sh new file mode 100755 index 00000000..fb3added --- /dev/null +++ b/.kitchen-verify.sh @@ -0,0 +1,22 @@ +#!/bin/bash +#set -x + +# setup +source /*.env +INVENTORY_BASE_URI=/tmp/kitchen/test/model/$MODEL +RECLASS=/tmp/kitchen + +# prereq +python -m ensurepip --default-pip +pip install pipenv + +# env +cd $RECLASS +pipenv --venv || pipenv install --python ${PYVER} +test -e /etc/reclsss || mkdir /etc/reclass +cp -avf $INVENTORY_BASE_URI/reclass-config* /etc/reclass + +# verify +for n in $(ls $INVENTORY_BASE_URI/nodes/*|sort); do + pipenv run python${PYVER} ./reclass.py --inventory-base-uri=$INVENTORY_BASE_URI --nodeinfo $(basename $n .yml) +done diff --git a/.kitchen.yml b/.kitchen.yml new file mode 100644 index 00000000..45be6296 --- /dev/null +++ b/.kitchen.yml @@ -0,0 +1,41 @@ +--- +driver: + name: docker + priviledged: false + use_sudo: false + volume: + - <%= ENV['PWD'] %>:/tmp/kitchen + + +provisioner: + name: shell + script: .kitchen-verify.sh + + +verifier: + name: inspec + +<%- pyver = ENV['PYTHON_VERSION'] || '2.7' %> + +platforms: + <% `find test/model -maxdepth 1 -mindepth 1 -type d |sort -u`.split().each do |model| %> + <% model=model.split('/')[2] %> + - name: <%= model %> + driver_config: + image: python:<%= pyver %> + platform: ubuntu + hostname: reclass + provision_command: + #FIXME, setup reclass env (prereq, configs, upload models) + #- apt-get install -y rsync + - echo " + export LC_ALL=C.UTF-8;\n + export LANG=C.UTF-8;\n + export PYVER=<%= pyver %>;\n + export MODEL=<%= model %>;\n + " > /kitchen.env + <% end %> + +suites: + - name: model + diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..559ef9ae --- /dev/null +++ b/.travis.yml @@ -0,0 +1,117 @@ +sudo: 
required +language: python +dist: trusty +cache: pip +python: +- '2.7' +- '3.6' +service: +- docker + +#apt: + #update: true + +#stages: +#- name: test +#- name: coverage +#- name: models +#- name: build +# if: fork = false +#- name: publish +# if: tag =~ ^v.* and fork = false and branch = 'master' + +env: + global: + - PACKAGENAME="reclass" + +install: &pyinst +- pip install -r requirements.txt +#- pip install pyparsing +#- pip install PyYAML +# To test example models with kitchen: +- | + test -e Gemfile || cat < Gemfile + source 'https://rubygems.org' + gem 'rake' + gem 'test-kitchen' + gem 'kitchen-docker' + gem 'kitchen-inspec' + gem 'inspec' +- bundle install + +script: +- python setup.py install +- find . reclass -name 'test_*.py' | sort | xargs -n1 -i% bash -c "echo %; python %" +# To test example models with kitchen: +- export PYTHON_VERSION=$TRAVIS_PYTHON_VERSION +- kitchen list +- kitchen test + +# NOTE: travis stage builds, below saved for future reference +#jobs: +# include: +# - stage: test +# script: &unittest +# - python setup.py install +# - find . reclass -name 'test_*.py' | sort | xargs -n1 -i% bash -c "echo %; python %" +# +# - stage: coverage +# install: *pyinst +# script: +# - python3 -m pytest --cov=. --cov-report=term-missing:skip-covered +# - coverage xml +# #- coveralls +# #- | +# #[ ! 
-z "${CODACY_PROJECT_TOKEN}" ] && python-codacy-coverage -r coverage.xml || echo "Codacy coverage NOT exported" +# +# - stage: lint +# script: +# - python3 -m flake8 +# +# - stage: models +# install: &kitchen +# - pip install PyYAML +# - pip install virtualenv +# - | +# test -e Gemfile || cat < Gemfile +# source 'https://rubygems.org' +# gem 'rake' +# gem 'test-kitchen' +# gem 'kitchen-docker' +# gem 'kitchen-inspec' +# gem 'inspec' +# - bundle install +# script: +# - export PYTHON_VERSION=$TRAVIS_PYTHON_VERSION +# - kitchen list +# #FIXME- kitchen test +# +# - stage: build +# install: *pyinst +# script: [] +# +# - stage: publish +# install: +# - "/bin/true" +# script: +# - "/bin/true" +# deploy: +# provider: pypi +# user: epcim +# password: +# secure: TBD +# on: +# tags: true +# repo: salt-formulas/reclass +# branch: master +# #FIXME, $TRAVIS_PYTHON_VERSION == '2.7' + +notifications: + webhooks: + on_success: change # options: [always|never|change] default: always + on_failure: never + on_start: never + on_cancel: never + on_error: never + email: true + diff --git a/MANIFEST.in b/MANIFEST.in index 1c1accce..1f4c27b8 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -4,9 +4,11 @@ include LICENSE ChangeLog.rst exclude Makefile requirements.txt .pylintrc reclass.py # Exclude testing infra exclude run_tests.py +prune reclass/tests prune reclass/datatypes/tests prune reclass/storage/tests prune reclass/utils/tests +prune reclass/values/tests # Exclude "source only" content prune doc prune examples diff --git a/Pipfile b/Pipfile new file mode 100644 index 00000000..fc2022b5 --- /dev/null +++ b/Pipfile @@ -0,0 +1,18 @@ +[[source]] +url = "https://pypi.python.org/simple" +verify_ssl = true +name = "pypi" + +[dev-packages] + +[packages] +pyparsing = "*" +PyYAML = "*" +six = "*" +pyyaml = "*" +enum34 = "*" +# FIXME, issues with compile phase +#"pygit2" = "*" + +[requires] +python_version = "2.7" diff --git a/README-extensions.rst b/README-extensions.rst new file mode 100644 
index 00000000..e67e441d --- /dev/null +++ b/README-extensions.rst @@ -0,0 +1,693 @@ +Escaping of References and Inventory Queries +-------------------------------------------- + +Reference and inventory queries can be escaped to produce literal strings, for example: + +.. code-block:: yaml + + parameters: + colour: Blue + unescaped: The colour is ${colour} + escaped: The colour is \${colour} + double_escaped: The colour is \\${colour} + + +This would produce: + +.. code-block:: yaml + + parameters: + colour: Blue + unescaped: The colour is Blue + escaped: The colour is ${colour} + double_escaped: The colour is \Blue + + + +Ignore class not found +---------------------- + +At some cases (bootstrapping, development) it can be convenient to ignore some missing classes. +To control the feature there are two options available: + +.. code-block:: yaml + + ignore_class_notfound: False + ignore_class_regexp: ['.*'] + +If you set regexp pattern to ``service.*`` all missing classes starting 'service.' will be logged with warning, but will not +fail to return rendered reclass. Assuming all parameter interpolation passes. + + + +Merging Referenced Lists and Dictionaries +----------------------------------------- + +Referenced lists or dicts can now be merged: + +.. code-block:: yaml + + # nodes/test.yml + classes: + - test1 + - test2 + parameters: + one: + a: 1 + b: 2 + two: + c: 3 + d: 4 + three: + e: 5 + + # classes/test1.yml + parameters: + three: ${one} + + # classes/test2.yml + parameters: + three: ${two} + +``running reclass.py --nodeinfo node1`` then gives: + +.. code-block:: yaml + + parameters: + one: + a: 1 + b: 2 + three: + a: 1 + b: 2 + c: 3 + d: 4 + e: 5 + two: + c: 3 + d: 4 + +This first sets the parameter three to the value of parameter one (class test1) then merges parameter two into +parameter three (class test2) and finally merges the parameter three definition given in the node definition into +the final value. 
+ + +Allow override list and dicts by empty entity,None instead of merge +------------------------------------------------------------------- + +With settings: + +.. code-block:: yaml + + allow_none_override: True # default True + + # note dict,list over None is allowed and not configurable + +Referenced lists or dicts can now be overriden by None or empty type of dict, list: + +.. code-block:: yaml + + # nodes/test.yml + parameters: + one: + a: 1 + b: 2 + two: {} + three: None + + # classes/test1.yml + parameters: + one: ${two} + + # classes/test2.yml + parameters: + three: ${one} + + +Constant Parameters +-------------------------- + +Parameters can be labeled as constant by using the prefix ``=`` + +.. code-block:: yaml + + parameters: + =one: 1 + +If in the normal parameter merging a constant parameter would be changed then depending +on the setting of ``strict_constant_parameters`` either an exception is raised (``strict_constant_parameters`` true) +or the parameter is left unchanged and no notification or error is given (``strict_constant_parameters`` false) + +For example with: + +.. code-block:: yaml + + # nodes/node1.yml + classes: + - first + - second + + # classes/first.yml + parameters: + =one: 1 + + # classes/second.yml + parameters: + one: 2 + +``reclass.py --nodeinfo node1`` then gives an ''Attempt to change constant value'' error if ``strict_constant_parameters`` +is true or gives: + +.. code-block:: yaml + + parameters: + alpha: + one: 1 + +if ``strict_constant_parameters`` is false + +Default value for ``strict_constant_parameters`` is True + +.. code-block:: yaml + + strict_constant_parameters: True + + +Nested References +----------------- + +References can now be nested, for example: + +.. code-block:: yaml + + # nodes/node1.yml + parameters: + alpha: + one: ${beta:${alpha:two}} + two: a + beta: + a: 99 + +``reclass.py --nodeinfo node1`` then gives: + +.. 
code-block:: yaml + + parameters: + alpha: + one: 99 + two: a + beta: + a: 99 + +The ``${beta:${alpha:two}}`` construct first resolves the ``${alpha:two}`` reference to the value 'a', then resolves +the reference ``${beta:a}`` to the value 99. + + +Ignore overwritten missing references +------------------------------------- + +Given the following classes: + +.. code-block:: yaml + + # node1.yml + classes: + - class1 + - class2 + - class3 + + # class1.yml + parameters: + a: ${x} + + # class2.yml + parameters: + a: ${y} + + # class3.yml + parameters: + y: 1 + + +The parameter ``a`` only depends on the parameter ``y`` through the reference set in class2. The fact that the parameter ``x`` referenced +in class1 is not defined does not affect the final value of the parameter ``a``. For such overwritten missing references by default a warning is +printed but no error is raised, providing the final value of the parameter being evaluated is a scalar. If the final value is a dictionary or list +an error will always be raised in the case of a missing reference. + +Default value is True to keep backward compatible behavior. + +.. code-block:: yaml + + ignore_overwritten_missing_reference: True + + +Print summary of missed references +---------------------------------- + +Instead of failing on the first undefinded reference error all missing reference errors are printed at once. + +.. code-block:: yaml + + reclass --nodeinfo mynode + -> dontpanic + Cannot resolve ${_param:kkk}, at mkkek3:tree:to:fail, in yaml_fs:///test/classes/third.yml + Cannot resolve ${_param:kkk}, at mkkek3:tree:another:xxxx, in yaml_fs:///test/classes/third.yml + Cannot resolve ${_param:kkk}, at mykey2:tree:to:fail, in yaml_fs:///test/classes/third.yml + +.. code-block:: yaml + + group_errors: True + + +Use references in class names +----------------------------- + +Allows to use references in the class names. 
+ +References pointed to in class names cannot themselves reference another key, they should be simple strings. + +To avoid pitfalls do not over-engineer your class references. They should be used only for core conditions and only for them. +A short example: `- system.wrodpress.db.${_class:database_backend}`. + +Best practices: +- use references in class names always load your global class specification prior the reference is used. +- structure your class references under parameters under one key (for example `_class`). +- use class references as a kind of "context" or "global" available options you always know what they are set. + +Class referencing for existing reclass users. Frequently when constructing your models you had to load or not load some +classes based on your setup. In most cases this lead to fork of a model or introducing kind of template generator (like cookiecutter) to +create a model based on the base "context" or "global" variables. Class referencing is a simple way how to avoid +"pre-processors" like this and if/else conditions around class section. + + +Assuming following class setup: + +* node is loading `third.yml` class only + + +Classes: + +.. code-block:: yaml + + #/etc/reclass/classes/global.yml + parameters: + _class: + env: + override: 'env.dev' + lab: + name: default + + #/etc/reclass/classes/lab/env/dev.yml + parameters: + lab: + name: dev + + #/etc/reclass/classes/second.yml + classes: + - global + - lab.${_class:env:override} + + #/etc/reclass/classes/third.yml + classes: + - global + - second + + +Reclass --nodeinfo then returns: + +.. code-block:: yaml + + ... + ... + applications: [] + environment: base + exports: {} + classes: + - global + - lab.${_class:env:override} + - second + parameters: + _class: + env: + override: env.dev + lab: + name: dev + ... + ... + + +Load classes with relative names +-------------------------------- + +Load referenced class from a relative location to the current class. 
+To load class from relative location start the class uri with "." or ".." char. +The only supported reference is to nested tree structure below the current class. + +You are allowed to use syntax for relative uri to required class on any place on your model (first class loaded, init.yml, regular class .yml). + +The feature is expected to improve flexibility while sharing classes between your models. + +Please mpte that you can't use '..' without any calss following. If you want simply up in the sctructure, type in '..init'. + +It's a new feature use it with care and mind that using "relative syntax" lower traceability of +your pillar composition. + +Example usage of relative class name using '.' and '..': + +.. code-block:: yaml + + #/etc/reclass/classes/component/defaults.yml + classes: + component: + config: + a: b + +.. code-block:: yaml + + #/etc/reclass/classes/component/init.yml + classes: + - .defaults + +.. code-block:: yaml + + #/etc/reclass/classes/component/configuration/init.yml + classes: + - ..defaults + + +Inventory Queries +----------------- + +Inventory querying works using a new key type - exports to hold values which other node definitions can read using a $[] query, for example with: + +.. code-block:: yaml + + # nodes/node1.yml + exports: + test_zero: 0 + test_one: + name: ${name} + value: 6 + test_two: ${dict} + + parameters: + name: node1 + dict: + a: 1 + b: 2 + exp_value_test: $[ exports:test_two ] + exp_if_test0: $[ if exports:test_zero == 0 ] + exp_if_test1: $[ exports:test_one if exports:test_one:value == 7 ] + exp_if_test2: $[ exports:test_one if exports:test_one:name == self:name ] + + # nodes/node2.yml + exports: + test_zero: 0 + test_one: + name: ${name} + value: 7 + test_two: ${dict} + + parameters: + name: node2 + dict: + a: 11 + b: 22 + + +``running reclass.py --nodeinfo node1`` gives (listing only the exports and parameters): + +.. 
code-block:: yaml + + exports: + test_one: + name: node1 + value: 6 + test_two: + a: 1 + b: 2 + parameters: + dict: + a: 1 + b: 2 + exp_if_test0: + - node1 + - node2 + exp_if_test1: + node2: + name: node2 + value: 7 + exp_if_test2: + node1: + name: node1 + value: 6 + exp_value_test: + node1: + a: 1 + b: 2 + node2: + a: 11 + b: 22 + name: node1 + + +Exports defined for a node can be a simple value or a reference to a parameter in the node definition. +The ``$[]`` inventory queries are calculated for simple value expressions, ``$[ exports:key ]``, by returning +a dictionary with an element (``{ node_name: key value }``) for each node which defines 'key' in the exports +section. For tests with a preceeding value, ``$[ exports:key if exports:test_key == test_value ]``, the +element (``{ node_name: key value }``) is only added to the returned dictionary if the test_key defined in +the node exports section equals the test value. For tests without a preceeding value, +``$[ if exports:test_key == test_value ]``, a list of nodes which pass the test is returned. For either test +form the test value can either be a simple value or a node parameter. And as well as an equality test +a not equals test (``!=``) can also be used. + + +**Inventory query options** + +By default inventory queries only look at nodes in the same environment as the querying node. This can be +overriden using the +AllEnvs option: + +.. code-block:: yaml + + $[ +AllEnvs exports:test ] + +Any errors in rendering the export parameters for a node will give an error for the inventory query as a whole. +This can be overriden using the ``+IgnoreErrors`` option: + +.. code-block:: yaml + + $[ +IgnoreErrors exports:test ] + +With the ``+IgnoreErrors`` option nodes which generate an error evaluating ``exports:test`` will be ignored. + +Inventory query options can be combined: + +.. 
code-block:: yaml + + $[ +AllEnvs +IgnoreErrors exports:test ] + +**Logical operators and/or** + +The logical operators and/or can be used in inventory queries: + +.. code-block:: yaml + + $[ exports:test_value if exports:test_zero == 0 and exports:test_one == self:value ] + +The individual elements of the if statement are evaluated and combined with the logical operators starting from the +left and working to the right. + + +**Inventory query example** + +Defining a cluster of machines using an inventory query, for example to open access to a database server to a +group of nodes. Given exports/parameters for nodes of the form: + +.. code-block:: yaml + + # for all nodes requiring access to the database server + exports: + host: + ip_address: aaa.bbb.ccc.ddd + cluster: _some_cluster_name_ + +.. code-block:: yaml + + # for the database server + parameters: + cluster_name: production-cluster + postgresql: + server: + clients: $[ exports:host:ip_address if exports:cluster == self:cluster_name ] + +This will generate a dictionary with an entry for node where the ``export:cluster`` key for a node is equal to the +``parameter:cluster_name`` key of the node on which the inventory query is run on. Each entry in the generated dictionary +will contain the value of the ``exports:host:ip_address`` key. The output dictionary (depending on node definitions) +would look like: + +.. code-block:: yaml + + node1: + ip_address: aaa.bbb.ccc.ddd + node2: + ip_address: www.xxx.yyy.zzz + +For nodes where exports:cluster key is not defined or where the key is not equal to self:cluster_name no entry is made +in the output dictionary. + +In practise the exports:cluster key can be set using a parameter reference: + +.. 
code-block:: yaml + + exports: + cluster: ${cluster_name} + parameters: + cluster_name: production-cluster + +The above exports and parameter definitions could be put into a separate class and then included by nodes which require +access to the database and included by the database server as well. + + +Compose node name +--------------------------- + +Nodes can be defined in subdirectories. However, node names (filename) must be unique across all subdirectories. + +For example, the following file structure is invalid: + +.. code-block:: yaml + + inventory/nodes/prod/mysql.yml + inventory/nodes/staging/mysql.yml + +With setting: + +.. code-block:: yaml + + compose_node_name: True # default False + +This adds the subfolder to the node name and the structure above can then be used. It generates the following reclass objects: + +.. code-block:: yaml + + nodes: + prod.mysql: + ... + staging.mysql: + ... + +If the subfolder path starts with the underscore character ``_``, then the subfolder path is NOT added to the node name. + + +Git storage type +---------------- + +Reclass node and class yaml files can be read from a remote git repository with the yaml_git storage type. Use nodes_uri and +classes_uri to define the git repos to use for nodes and classes. These can be the same repo. + +For salt masters using ssh connections the private and public keys must be readable by the salt daemon, which requires the +private key NOT be password protected. For stand alone reclass using ssh connections if the privkey and pubkey options +are not defined then any in memory key (from ssh-add) will be used. + +Salt master reclass config example: + +.. 
code-block:: yaml + + storage_type:yaml_git + nodes_uri: + # branch to use + branch: master + + # cache directory (default: ~/.reclass/git/cache) + cache_dir: /var/cache/reclass/git + + # lock directory (default: ~/.reclass/git/lock) + lock_dir: /var/cache/reclass/lock + + # private key for ssh connections (no default, but will used keys stored + # by ssh-add in memory if privkey and pubkey are not set) + privkey: /root/salt_rsa + # public key for ssh connections + pubkey: /root/salt_rsa.pub + + repo: git+ssh://gitlab@remote.server:salt/nodes.git + + classes_uri: + # branch to use or __env__ to use the branch matching the node + # environment name + branch: __env__ + + # cache directory (default: ~/.reclass/git/cache) + cache_dir: /var/cache/reclass/git + + # lock directory (default: ~/.reclass/git/lock) + lock_dir: /var/cache/reclass/lock + + # private key for ssh connections (no default, but will used keys stored + # by ssh-add in memory if privkey and pubkey are not set) + privkey: /root/salt_rsa + # public key for ssh connections + pubkey: /root/salt_rsa.pub + + # branch/env overrides for specific branches + env_overrides: + # prod env uses master branch + - prod: + branch: master + # use master branch for nodes with no environment defined + - none: + branch: master + + repo: git+ssh://gitlab@remote.server:salt/site.git + + # root directory of the class hierarcy in git repo + # defaults to root directory of git repo if not given + root: classes + + +Mixed storage type +------------------ + +Use a mixture of storage types. + +Salt master reclass config example, which by default uses yaml_git storage but overrides the location for +classes for the pre-prod environment to use a directory on the local disc: + +.. 
code-block:: yaml + + storage_type: mixed + nodes_uri: + # storage type to use + storage_type: yaml_git + + # yaml_git storage options + branch: master + cache_dir: /var/cache/reclass/git + lock_dir: /var/cache/reclass/lock + privkey: /root/salt_rsa + pubkey: /root/salt_rsa.pub + repo: git+ssh://gitlab@remote.server:salt/nodes.git + + classes_uri: + # storage type to use + storage_type: yaml_git + + # yaml_git storage options + branch: __env__ + cache_dir: /var/cache/reclass/git + lock_dir: /var/cache/reclass/lock + privkey: /root/salt_rsa + pubkey: /root/salt_rsa.pub + repo: git+ssh://gitlab@remote.server:salt/site.git + root: classes + + env_overrides: + - prod: + branch: master + - none: + branch: master + - pre-prod: + # override storage type for this environment + storage_type: yaml_fs + # options for yaml_fs storage type + uri: /srv/salt/env/pre-prod/classes diff --git a/README.rst b/README.rst index e88c1356..5dcb5a19 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,50 @@ -reclass README -============== +Reclass README +========================= + +This is the fork of original **reclass** that is available at: +https://github.com/madduck/reclass + +Extentions +========== + +List of the core features: + +* Escaping of References and Inventory Queries +* Merging Referenced Lists and Dictionaries +* Nested References +* Inventory Queries +* Ignore class notfound/regexp option + + +Documentation +============= + +.. _README-extensions: README-extensions.rst + +Documentation covering the original version is in the doc directory. +See the `README-extensions`_ file for documentation on the extentions. + + +.. 
include:: ./README-extensions.rst + + +Reclass related projects/tools +============================== + +Queries: + +* yg, yaml grep with 'jq' syntax - https://gist.github.com/epcim/f1c5b748fa7c942de50677aef04f29f8, (https://asciinema.org/a/84173) +* reclass-graph - https://github.com/tomkukral/reclass-graph + +Introspection, manupulation: + +* reclass-tools, for manipulating reclass models - https://github.com/dis-xcom/reclass_tools + +YAML merge tools: + +* spruce, general purpose YAML & JSON merging tool - https://github.com/geofffranks/spruce + +Other: + +* saltclass, new pillar/master_tops module for salt with the behaviour of reclass - https://github.com/saltstack/salt/pull/42349 -The documentation for **reclass** is available from -http://reclass.pantsfullofunix.net. diff --git a/doc/source/changelog.rst b/doc/source/changelog.rst index d7aa7b2b..dccf34fe 100644 --- a/doc/source/changelog.rst +++ b/doc/source/changelog.rst @@ -5,6 +5,24 @@ ChangeLog ========= ========== ======================================================== Version Date Changes ========= ========== ======================================================== +1.6.0 2018-11-06 * Python code and parser refactoring by a-ovchinnikov + * Improvements in yaml_git and mixed setup by Andrew Pickford + * Relative paths in class names by Petr Michalec, Martin Polreich and Andrew Pickford + * Bug Fixes for recently added features +1.5.6 2018-07-30 * Fix, usage of integers as pillar keys + * Refactoring python codebase by @a-ovchinkonv + * New feature, "compose node name" from node subdirectory structure (by @gburiola) +1.5.5 2018-07 * Add immutable (constant) parameters + * Fixes +1.5.4 2018-05 * Add support for salt 2018.3 + * Add support for python 2.7/3.x + * Extend tests coverage +1.5.3 2018 * Add new features + fixes + - last 'known' full compatible release with original reclass + - release shipped as well as .deb package at mirror.mirantis.com +1.5.x 2017 * Project forked under 
salt-formulas/reclass + - based on @andrewpickford fork and community fixes + - features against original are in README-extensions.rst 1.4.1 2014-10-28 * Revert debug logging, which wasn't fault-free and so it needs more time to mature. 1.4 2014-10-25 * Add rudimentary debug logging diff --git a/doc/source/conf.py b/doc/source/conf.py index 422128e4..6ce7f02c 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -16,7 +16,7 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) +sys.path.insert(0, os.path.abspath('../../')) # -- General configuration ----------------------------------------------------- diff --git a/reclass.py b/reclass.py index a0d8eb8c..9c71a101 100755 --- a/reclass.py +++ b/reclass.py @@ -6,6 +6,10 @@ # Copyright © 2007–13 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals import reclass.cli reclass.cli.main() diff --git a/reclass/__init__.py b/reclass/__init__.py index 7cd6c30f..d5f34103 100644 --- a/reclass/__init__.py +++ b/reclass/__init__.py @@ -6,17 +6,23 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals -from output import OutputLoader -from storage.loader import StorageBackendLoader -from storage.memcache_proxy import MemcacheProxy +from .output import OutputLoader +from .storage.loader import StorageBackendLoader +from .storage.memcache_proxy import MemcacheProxy -def get_storage(storage_type, nodes_uri, classes_uri, **kwargs): +def get_storage(storage_type, nodes_uri, classes_uri, compose_node_name, **kwargs): storage_class = StorageBackendLoader(storage_type).load() - return MemcacheProxy(storage_class(nodes_uri, classes_uri, **kwargs)) + return MemcacheProxy(storage_class(nodes_uri, classes_uri, compose_node_name, **kwargs)) +def get_path_mangler(storage_type, **kwargs): + return StorageBackendLoader(storage_type).path_mangler() -def output(data, fmt, pretty_print=False): +def output(data, fmt, pretty_print=False, no_refs=False): output_class = OutputLoader(fmt).load() outputter = output_class() - return outputter.dump(data, pretty_print=pretty_print) + return outputter.dump(data, pretty_print=pretty_print, no_refs=no_refs) diff --git a/reclass/adapters/__init__.py b/reclass/adapters/__init__.py index 8a17572b..06edb64d 100755 --- a/reclass/adapters/__init__.py +++ b/reclass/adapters/__init__.py @@ -5,4 +5,8 @@ # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 -# + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals diff --git a/reclass/adapters/ansible.py b/reclass/adapters/ansible.py index cbf5f173..be671985 100755 --- a/reclass/adapters/ansible.py +++ b/reclass/adapters/ansible.py @@ -10,15 +10,26 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +# 2017.08.08 Andew Pickford +# The ansible adapter has received little testing and may not work at all now. + + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals import os, sys, posix, optparse +from six import iteritems + from reclass import get_storage, output from reclass.core import Core from reclass.errors import ReclassException from reclass.config import find_and_read_configfile, get_options from reclass.version import * from reclass.constants import MODE_NODEINFO +from reclass.settings import Settings def cli(): try: @@ -27,6 +38,7 @@ def cli(): ansible_dir = os.path.abspath(os.path.dirname(sys.argv[0])) defaults = {'inventory_base_uri': ansible_dir, + 'no_refs' : False, 'pretty_print' : True, 'output' : 'json', 'applications_postfix': '_hosts' @@ -54,10 +66,14 @@ def add_ansible_options_group(parser, defaults): add_options_cb=add_ansible_options_group, defaults=defaults) - storage = get_storage(options.storage_type, options.nodes_uri, - options.classes_uri) + storage = get_storage(options.storage_type, + options.nodes_uri, + options.classes_uri, + options.compose_node_name) class_mappings = defaults.get('class_mappings') - reclass = Core(storage, class_mappings) + defaults.update(vars(options)) + settings = Settings(defaults) + reclass = Core(storage, class_mappings, settings) if options.mode == MODE_NODEINFO: data = reclass.nodeinfo(options.hostname) @@ -75,15 +91,15 @@ def add_ansible_options_group(parser, defaults): apps = data['applications'] if options.applications_postfix: postfix = options.applications_postfix - groups.update([(k + postfix, v) for k,v in apps.iteritems()]) + groups.update([(k + postfix, v) for (k, v) in iteritems(apps)]) else: groups.update(apps) data = groups - print output(data, options.output, options.pretty_print) + print(output(data, options.output, 
options.pretty_print, options.no_refs)) - except ReclassException, e: + except ReclassException as e: e.exit_with_message(sys.stderr) sys.exit(posix.EX_OK) diff --git a/reclass/adapters/salt.py b/reclass/adapters/salt.py index 1b458231..523b0c46 100755 --- a/reclass/adapters/salt.py +++ b/reclass/adapters/salt.py @@ -6,16 +6,22 @@ # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals import os, sys, posix -from reclass import get_storage, output +from six import iteritems + +from reclass import get_storage, output, get_path_mangler from reclass.core import Core from reclass.errors import ReclassException -from reclass.config import find_and_read_configfile, get_options, \ - path_mangler +from reclass.config import find_and_read_configfile, get_options from reclass.constants import MODE_NODEINFO from reclass.defaults import * +from reclass.settings import Settings from reclass.version import * def ext_pillar(minion_id, pillar, @@ -24,16 +30,18 @@ def ext_pillar(minion_id, pillar, nodes_uri=OPT_NODES_URI, classes_uri=OPT_CLASSES_URI, class_mappings=None, - propagate_pillar_data_to_reclass=False): + propagate_pillar_data_to_reclass=False, + compose_node_name=OPT_COMPOSE_NODE_NAME, + **kwargs): - nodes_uri, classes_uri = path_mangler(inventory_base_uri, - nodes_uri, classes_uri) - storage = get_storage(storage_type, nodes_uri, classes_uri, - default_environment='base') + path_mangler = get_path_mangler(storage_type) + nodes_uri, classes_uri = path_mangler(inventory_base_uri, nodes_uri, classes_uri) + storage = get_storage(storage_type, nodes_uri, classes_uri, compose_node_name) input_data = None if propagate_pillar_data_to_reclass: input_data = pillar - reclass = Core(storage, class_mappings, input_data=input_data) + settings = Settings(kwargs) + reclass = Core(storage, 
class_mappings, settings, input_data=input_data) data = reclass.nodeinfo(minion_id) params = data.get('parameters', {}) @@ -47,14 +55,14 @@ def ext_pillar(minion_id, pillar, def top(minion_id, storage_type=OPT_STORAGE_TYPE, inventory_base_uri=OPT_INVENTORY_BASE_URI, nodes_uri=OPT_NODES_URI, - classes_uri=OPT_CLASSES_URI, - class_mappings=None): + classes_uri=OPT_CLASSES_URI, class_mappings=None, compose_node_name=OPT_COMPOSE_NODE_NAME, + **kwargs): - nodes_uri, classes_uri = path_mangler(inventory_base_uri, - nodes_uri, classes_uri) - storage = get_storage(storage_type, nodes_uri, classes_uri, - default_environment='base') - reclass = Core(storage, class_mappings, input_data=None) + path_mangler = get_path_mangler(storage_type) + nodes_uri, classes_uri = path_mangler(inventory_base_uri, nodes_uri, classes_uri) + storage = get_storage(storage_type, nodes_uri, classes_uri, compose_node_name) + settings = Settings(kwargs) + reclass = Core(storage, class_mappings, settings, input_data=None) # if the minion_id is not None, then return just the applications for the # specific minion, otherwise return the entire top data (which we need for @@ -68,7 +76,7 @@ def top(minion_id, storage_type=OPT_STORAGE_TYPE, else: data = reclass.inventory() nodes = {} - for node_id, node_data in data['nodes'].iteritems(): + for (node_id, node_data) in iteritems(data['nodes']): env = node_data['environment'] if env not in nodes: nodes[env] = {} @@ -81,6 +89,7 @@ def cli(): try: inventory_dir = os.path.abspath(os.path.dirname(sys.argv[0])) defaults = {'pretty_print' : True, + 'no_refs' : False, 'output' : 'yaml', 'inventory_base_uri': inventory_dir } @@ -95,6 +104,12 @@ def cli(): nodeinfo_help='output pillar data for a specific node', defaults=defaults) class_mappings = defaults.get('class_mappings') + defaults.update(vars(options)) + defaults.pop("storage_type", None) + defaults.pop("inventory_base_uri", None) + defaults.pop("nodes_uri", None) + defaults.pop("classes_uri", None) + 
defaults.pop("class_mappings", None) if options.mode == MODE_NODEINFO: data = ext_pillar(options.nodename, {}, @@ -102,18 +117,20 @@ def cli(): inventory_base_uri=options.inventory_base_uri, nodes_uri=options.nodes_uri, classes_uri=options.classes_uri, - class_mappings=class_mappings) + class_mappings=class_mappings, + **defaults) else: data = top(minion_id=None, storage_type=options.storage_type, inventory_base_uri=options.inventory_base_uri, nodes_uri=options.nodes_uri, classes_uri=options.classes_uri, - class_mappings=class_mappings) + class_mappings=class_mappings, + **defaults) - print output(data, options.output, options.pretty_print) + print(output(data, options.output, options.pretty_print, options.no_refs)) - except ReclassException, e: + except ReclassException as e: e.exit_with_message(sys.stderr) sys.exit(posix.EX_OK) diff --git a/reclass/cli.py b/reclass/cli.py index 5666e165..38bd5fc4 100644 --- a/reclass/cli.py +++ b/reclass/cli.py @@ -6,40 +6,48 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals import sys, os, posix from reclass import get_storage, output from reclass.core import Core +from reclass.settings import Settings from reclass.config import find_and_read_configfile, get_options -from reclass.errors import ReclassException from reclass.defaults import * +from reclass.errors import ReclassException from reclass.constants import MODE_NODEINFO from reclass.version import * def main(): try: - defaults = {'pretty_print' : OPT_PRETTY_PRINT, + defaults = {'no_refs' : OPT_NO_REFS, + 'pretty_print' : OPT_PRETTY_PRINT, 'output' : OPT_OUTPUT } defaults.update(find_and_read_configfile()) - options = get_options(RECLASS_NAME, VERSION, DESCRIPTION, - defaults=defaults) - storage = get_storage(options.storage_type, options.nodes_uri, - options.classes_uri, default_environment='base') + options = get_options(RECLASS_NAME, VERSION, DESCRIPTION, defaults=defaults) + storage = get_storage(options.storage_type, + options.nodes_uri, + options.classes_uri, + options.compose_node_name) class_mappings = defaults.get('class_mappings') - reclass = Core(storage, class_mappings) + defaults.update(vars(options)) + settings = Settings(defaults) + reclass = Core(storage, class_mappings, settings) if options.mode == MODE_NODEINFO: data = reclass.nodeinfo(options.nodename) - else: data = reclass.inventory() - print output(data, options.output, options.pretty_print) + print(output(data, options.output, options.pretty_print, options.no_refs)) - except ReclassException, e: + except ReclassException as e: e.exit_with_message(sys.stderr) sys.exit(posix.EX_OK) diff --git a/reclass/config.py b/reclass/config.py index 17d0dc6f..d24f7fd3 100644 --- a/reclass/config.py +++ b/reclass/config.py @@ -6,12 +6,17 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals import yaml, os, optparse, posix, sys -import errors -from defaults import * -from constants import MODE_NODEINFO, MODE_INVENTORY +from . import errors, get_path_mangler +from .defaults import * +from .constants import MODE_NODEINFO, MODE_INVENTORY + def make_db_options_group(parser, defaults={}): ret = optparse.OptionGroup(parser, 'Database options', @@ -20,15 +25,23 @@ def make_db_options_group(parser, defaults={}): default=defaults.get('storage_type', OPT_STORAGE_TYPE), help='the type of storage backend to use [%default]') ret.add_option('-b', '--inventory-base-uri', dest='inventory_base_uri', - default=defaults.get('inventory_base_uri', - OPT_INVENTORY_BASE_URI), + default=defaults.get('inventory_base_uri', OPT_INVENTORY_BASE_URI), help='the base URI to prepend to nodes and classes [%default]'), ret.add_option('-u', '--nodes-uri', dest='nodes_uri', default=defaults.get('nodes_uri', OPT_NODES_URI), help='the URI to the nodes storage [%default]'), ret.add_option('-c', '--classes-uri', dest='classes_uri', default=defaults.get('classes_uri', OPT_CLASSES_URI), - help='the URI to the classes storage [%default]') + help='the URI to the classes storage [%default]'), + ret.add_option('-z', '--ignore-class-notfound', dest='ignore_class_notfound', + default=defaults.get('ignore_class_notfound', OPT_IGNORE_CLASS_NOTFOUND), + help='decision for not found classes [%default]') + ret.add_option('-a', '--compose-node-name', dest='compose_node_name', action="store_true", + default=defaults.get('compose_node_name', OPT_COMPOSE_NODE_NAME), + help='Add subdir when generating node names. 
[%default]') + ret.add_option('-x', '--ignore-class-notfound-regexp', dest='ignore_class_notfound_regexp', + default=defaults.get('ignore_class_notfound_regexp', OPT_IGNORE_CLASS_NOTFOUND_REGEXP), + help='regexp for not found classes [%default]') return ret @@ -38,10 +51,17 @@ def make_output_options_group(parser, defaults={}): ret.add_option('-o', '--output', dest='output', default=defaults.get('output', OPT_OUTPUT), help='output format (yaml or json) [%default]') - ret.add_option('-y', '--pretty-print', dest='pretty_print', - action="store_true", + ret.add_option('-y', '--pretty-print', dest='pretty_print', action="store_true", default=defaults.get('pretty_print', OPT_PRETTY_PRINT), help='try to make the output prettier [%default]') + ret.add_option('-r', '--no-refs', dest='no_refs', action="store_true", + default=defaults.get('no_refs', OPT_NO_REFS), + help='output all key values do not use yaml references [%default]') + ret.add_option('-1', '--single-error', dest='group_errors', action="store_false", + default=defaults.get('group_errors', OPT_GROUP_ERRORS), + help='throw errors immediately instead of grouping them together') + ret.add_option('-0', '--multiple-errors', dest='group_errors', action="store_true", + help='were possible report any errors encountered as a group') return ret @@ -128,30 +148,6 @@ def option_checker(options, args): return parser, option_checker -def path_mangler(inventory_base_uri, nodes_uri, classes_uri): - - if inventory_base_uri is None: - # if inventory_base is not given, default to current directory - inventory_base_uri = os.getcwd() - - nodes_uri = nodes_uri or 'nodes' - classes_uri = classes_uri or 'classes' - - def _path_mangler_inner(path): - ret = os.path.join(inventory_base_uri, path) - ret = os.path.expanduser(ret) - return os.path.abspath(ret) - - n, c = map(_path_mangler_inner, (nodes_uri, classes_uri)) - if n == c: - raise errors.DuplicateUriError(n, c) - common = os.path.commonprefix((n, c)) - if common == n or common == 
c: - raise errors.UriOverlapError(n, c) - - return n, c - - def get_options(name, version, description, inventory_shortopt='-i', inventory_longopt='--inventory', @@ -175,15 +171,14 @@ def get_options(name, version, description, options, args = parser.parse_args() checker(options, args) - options.nodes_uri, options.classes_uri = \ - path_mangler(options.inventory_base_uri, options.nodes_uri, - options.classes_uri) + path_mangler = get_path_mangler(options.storage_type) + options.nodes_uri, options.classes_uri = path_mangler(options.inventory_base_uri, options.nodes_uri, options.classes_uri) return options def vvv(msg): - #print >>sys.stderr, msg + #print(msg, file=sys.stderr) pass @@ -192,8 +187,8 @@ def find_and_read_configfile(filename=CONFIG_FILE_NAME, for d in dirs: f = os.path.join(d, filename) if os.access(f, os.R_OK): - vvv('Using config file: {0}'.format(f)) - return yaml.safe_load(file(f)) + vvv('Using config file: {0}'.format(str(f))) + return yaml.safe_load(open(f)) elif os.path.isfile(f): raise PermissionsError('cannot read %s' % f) return {} diff --git a/reclass/constants.py b/reclass/constants.py index f69fa8cb..58f77697 100644 --- a/reclass/constants.py +++ b/reclass/constants.py @@ -6,6 +6,11 @@ # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + class _Constant(object): diff --git a/reclass/core.py b/reclass/core.py index 76bd0a8e..3e0ab34d 100644 --- a/reclass/core.py +++ b/reclass/core.py @@ -6,22 +6,40 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals +import copy import time -#import types import re -#import sys import fnmatch import shlex -from reclass.datatypes import Entity, Classes, Parameters -from reclass.errors import MappingFormatError, ClassNotFound +import string +import sys +import yaml + +from six import iteritems + +from reclass.settings import Settings +from reclass.datatypes import Entity, Classes, Parameters, Exports +from reclass.errors import MappingFormatError, ClassNameResolveError, ClassNotFound, InvQueryClassNameResolveError, InvQueryClassNotFound, InvQueryError, InterpolationError, ResolveError +from reclass.values.parser import Parser + class Core(object): - def __init__(self, storage, class_mappings, input_data=None): + _parser = Parser() + + def __init__(self, storage, class_mappings, settings, input_data=None): self._storage = storage self._class_mappings = class_mappings + self._settings = settings self._input_data = input_data + if self._settings.ignore_class_notfound: + self._cnf_r = re.compile( + '|'.join(self._settings.ignore_class_notfound_regexp)) @staticmethod def _get_timestamp(): @@ -47,7 +65,7 @@ def _shlex_split(instr): regexp = True try: key = lexer.get_token() - except ValueError, e: + except ValueError as e: raise MappingFormatError('Error in mapping "{0}": missing closing ' 'quote (or slash)'.format(instr)) if regexp: @@ -56,7 +74,7 @@ def _shlex_split(instr): def _get_class_mappings_entity(self, nodename): if not self._class_mappings: - return Entity(name='empty (class mappings)') + return Entity(self._settings, name='empty (class mappings)') c = Classes() for mapping in self._class_mappings: matched = False @@ -72,32 +90,57 @@ def _get_class_mappings_entity(self, nodename): for klass in klasses: c.append_if_new(klass) - return Entity(classes=c, + return 
Entity(self._settings, classes=c, name='class mappings for node {0}'.format(nodename)) def _get_input_data_entity(self): if not self._input_data: - return Entity(name='empty (input data)') - p = Parameters(self._input_data) - return Entity(parameters=p, name='input data') + return Entity(self._settings, name='empty (input data)') + p = Parameters(self._input_data, self._settings) + return Entity(self._settings, parameters=p, name='input data') - def _recurse_entity(self, entity, merge_base=None, seen=None, nodename=None): + def _recurse_entity(self, entity, merge_base=None, context=None, seen=None, nodename=None, environment=None): if seen is None: seen = {} + if environment is None: + environment = self._settings.default_environment + if merge_base is None: - merge_base = Entity(name='empty (@{0})'.format(nodename)) + merge_base = Entity(self._settings, name='empty (@{0})'.format(nodename)) + + if context is None: + context = Entity(self._settings, name='empty (@{0})'.format(nodename)) for klass in entity.classes.as_list(): + # class name contain reference + num_references = klass.count(self._settings.reference_sentinels[0]) +\ + klass.count(self._settings.export_sentinels[0]) + if num_references > 0: + try: + klass = str(self._parser.parse(klass, self._settings).render(merge_base.parameters.as_dict(), {})) + except ResolveError as e: + try: + klass = str(self._parser.parse(klass, self._settings).render(context.parameters.as_dict(), {})) + except ResolveError as e: + raise ClassNameResolveError(klass, nodename, entity.uri) + if klass not in seen: try: - class_entity = self._storage.get_class(klass) - except ClassNotFound, e: - e.set_nodename(nodename) - raise e + class_entity = self._storage.get_class(klass, environment, self._settings) + except ClassNotFound as e: + if self._settings.ignore_class_notfound: + if self._cnf_r.match(klass): + if self._settings.ignore_class_notfound_warning: + # TODO, add logging handler + print("[WARNING] Reclass class not found: 
'%s'. Skipped!" % klass, file=sys.stderr) + continue + e.nodename = nodename + e.uri = entity.uri + raise - descent = self._recurse_entity(class_entity, seen=seen, - nodename=nodename) + descent = self._recurse_entity(class_entity, context=context, seen=seen, + nodename=nodename, environment=environment) # on every iteration, we merge the result of the recursive # descent into what we have so far… merge_base.merge(descent) @@ -109,21 +152,85 @@ def _recurse_entity(self, entity, merge_base=None, seen=None, nodename=None): merge_base.merge(entity) return merge_base - def _nodeinfo(self, nodename): - node_entity = self._storage.get_node(nodename) - base_entity = Entity(name='base') + def _get_automatic_parameters(self, nodename, environment): + if self._settings.automatic_parameters: + pars = { + '_reclass_': { + 'name': { + 'full': nodename, + 'short': nodename.split('.')[0] + }, + 'environment': environment + } + } + return Parameters(pars, self._settings, '__auto__') + else: + return Parameters({}, self._settings, '') + + def _get_inventory(self, all_envs, environment, queries): + inventory = {} + for nodename in self._storage.enumerate_nodes(): + try: + node_base = self._storage.get_node(nodename, self._settings) + if node_base.environment is None: + node_base.environment = self._settings.default_environment + except yaml.scanner.ScannerError as e: + if self._settings.inventory_ignore_failed_node: + continue + raise + + if all_envs or node_base.environment == environment: + try: + node = self._node_entity(nodename) + except ClassNotFound as e: + raise InvQueryClassNotFound(e) + except ClassNameResolveError as e: + raise InvQueryClassNameResolveError(e) + if queries is None: + try: + node.interpolate_exports() + except InterpolationError as e: + e.nodename = nodename + else: + node.initialise_interpolation() + for p, q in queries: + try: + node.interpolate_single_export(q) + except InterpolationError as e: + e.nodename = nodename + raise InvQueryError(q.contents, 
e, context=p, uri=q.uri) + inventory[nodename] = node.exports.as_dict() + return inventory + + def _node_entity(self, nodename): + node_entity = self._storage.get_node(nodename, self._settings) + if node_entity.environment == None: + node_entity.environment = self._settings.default_environment + base_entity = Entity(self._settings, name='base') base_entity.merge(self._get_class_mappings_entity(node_entity.name)) base_entity.merge(self._get_input_data_entity()) + base_entity.merge_parameters(self._get_automatic_parameters(nodename, node_entity.environment)) seen = {} - merge_base = self._recurse_entity(base_entity, seen=seen, - nodename=base_entity.name) - ret = self._recurse_entity(node_entity, merge_base, seen=seen, - nodename=node_entity.name) - ret.interpolate() - return ret + merge_base = self._recurse_entity(base_entity, seen=seen, nodename=nodename, + environment=node_entity.environment) + return self._recurse_entity(node_entity, merge_base=merge_base, context=merge_base, seen=seen, + nodename=nodename, environment=node_entity.environment) + + def _nodeinfo(self, nodename, inventory): + try: + node = self._node_entity(nodename) + node.initialise_interpolation() + if node.parameters.has_inv_query and inventory is None: + inventory = self._get_inventory(node.parameters.needs_all_envs, node.environment, node.parameters.get_inv_queries()) + node.interpolate(inventory) + return node + except InterpolationError as e: + e.nodename = nodename + raise def _nodeinfo_as_dict(self, nodename, entity): - ret = {'__reclass__' : {'node': entity.name, 'name': nodename, + ret = {'__reclass__' : {'node': entity.name, + 'name': nodename, 'uri': entity.uri, 'environment': entity.environment, 'timestamp': Core._get_timestamp() @@ -133,17 +240,23 @@ def _nodeinfo_as_dict(self, nodename, entity): return ret def nodeinfo(self, nodename): - return self._nodeinfo_as_dict(nodename, self._nodeinfo(nodename)) + return self._nodeinfo_as_dict(nodename, self._nodeinfo(nodename, None)) def 
inventory(self): + query_nodes = set() entities = {} + inventory = self._get_inventory(True, '', None) for n in self._storage.enumerate_nodes(): - entities[n] = self._nodeinfo(n) + entities[n] = self._nodeinfo(n, inventory) + if entities[n].parameters.has_inv_query: + nodes.add(n) + for n in query_nodes: + entities[n] = self._nodeinfo(n, inventory) nodes = {} applications = {} classes = {} - for f, nodeinfo in entities.iteritems(): + for (f, nodeinfo) in iteritems(entities): d = nodes[f] = self._nodeinfo_as_dict(f, nodeinfo) for a in d['applications']: if a in applications: diff --git a/reclass/datatypes/__init__.py b/reclass/datatypes/__init__.py index 20f7551f..78c110bd 100644 --- a/reclass/datatypes/__init__.py +++ b/reclass/datatypes/__init__.py @@ -6,7 +6,13 @@ # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # -from applications import Applications -from classes import Classes -from entity import Entity -from parameters import Parameters +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from .applications import Applications +from .classes import Classes +from .entity import Entity +from .exports import Exports +from .parameters import Parameters diff --git a/reclass/datatypes/applications.py b/reclass/datatypes/applications.py index d024e971..4f6ee10b 100644 --- a/reclass/datatypes/applications.py +++ b/reclass/datatypes/applications.py @@ -6,8 +6,13 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals -from classes import Classes + +from .classes import Classes class Applications(Classes): ''' @@ -23,18 +28,14 @@ class Applications(Classes): def __init__(self, iterable=None, negation_prefix=DEFAULT_NEGATION_PREFIX): - self._negation_prefix = negation_prefix + self.negation_prefix = negation_prefix self._offset = len(negation_prefix) self._negations = [] super(Applications, self).__init__(iterable) - def _get_negation_prefix(self): - return self._negation_prefix - negation_prefix = property(_get_negation_prefix) - def append_if_new(self, item): self._assert_is_string(item) - if item.startswith(self._negation_prefix): + if item.startswith(self.negation_prefix): item = item[self._offset:] self._negations.append(item) try: @@ -59,6 +60,6 @@ def merge_unique(self, iterable): def __repr__(self): contents = self._items + \ - ['%s%s' % (self._negation_prefix, i) for i in self._negations] + ['%s%s' % (self.negation_prefix, i) for i in self._negations] return "%s(%r, %r)" % (self.__class__.__name__, contents, - self._negation_prefix) + str(self.negation_prefix)) diff --git a/reclass/datatypes/classes.py b/reclass/datatypes/classes.py index b8793a22..fa9cbcfb 100644 --- a/reclass/datatypes/classes.py +++ b/reclass/datatypes/classes.py @@ -6,13 +6,18 @@ # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals -import types +import six import os from reclass.errors import InvalidClassnameError INVALID_CHARACTERS_FOR_CLASSNAMES = ' ' + os.sep + class Classes(object): ''' A very limited ordered set of strings with O(n) uniqueness constraints. 
It @@ -51,8 +56,8 @@ def merge_unique(self, iterable): self.append_if_new(i) def _assert_is_string(self, item): - if not isinstance(item, types.StringTypes): - raise TypeError('%s instances can only contain strings, '\ + if not isinstance(item, six.string_types): + raise TypeError('%s instances can only contain strings, ' 'not %s' % (self.__class__.__name__, type(item))) def _assert_valid_characters(self, item): @@ -70,5 +75,4 @@ def append_if_new(self, item): self._append_if_new(item) def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, - self._items) + return '%s(%r)' % (self.__class__.__name__, self._items) diff --git a/reclass/datatypes/entity.py b/reclass/datatypes/entity.py index 573a28c9..2e0e1e43 100644 --- a/reclass/datatypes/entity.py +++ b/reclass/datatypes/entity.py @@ -6,9 +6,15 @@ # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # -from classes import Classes -from applications import Applications -from parameters import Parameters +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from .classes import Classes +from .applications import Applications +from .exports import Exports +from .parameters import Parameters class Entity(object): ''' @@ -16,76 +22,94 @@ class Entity(object): for merging. The name and uri of an Entity will be updated to the name and uri of the Entity that is being merged. 
''' - def __init__(self, classes=None, applications=None, parameters=None, - uri=None, name=None, environment=None): - if classes is None: classes = Classes() - self._set_classes(classes) - if applications is None: applications = Applications() - self._set_applications(applications) - if parameters is None: parameters = Parameters() - self._set_parameters(parameters) - self._uri = uri or '' - self._name = name or '' - self._environment = environment or '' + def __init__(self, settings, classes=None, applications=None, + parameters=None, exports=None, uri=None, name=None, + environment=None): + self._uri = '' if uri is None else uri + self._name = '' if name is None else name + self._classes = self._set_field(classes, Classes) + self._applications = self._set_field(applications, Applications) + pars = [None, settings, uri] + self._parameters = self._set_field(parameters, Parameters, pars) + self._exports = self._set_field(exports, Exports, pars) + self._environment = environment name = property(lambda s: s._name) uri = property(lambda s: s._uri) - environment = property(lambda s: s._environment) classes = property(lambda s: s._classes) applications = property(lambda s: s._applications) parameters = property(lambda s: s._parameters) + exports = property(lambda s: s._exports) - def _set_classes(self, classes): - if not isinstance(classes, Classes): - raise TypeError('Entity.classes cannot be set to '\ - 'instance of type %s' % type(classes)) - self._classes = classes + @property + def environment(self): + return self._environment - def _set_applications(self, applications): - if not isinstance(applications, Applications): - raise TypeError('Entity.applications cannot be set to '\ - 'instance of type %s' % type(applications)) - self._applications = applications + @environment.setter + def environment(self, value): + self._environment = value - def _set_parameters(self, parameters): - if not isinstance(parameters, Parameters): - raise TypeError('Entity.parameters cannot 
be set to '\ - 'instance of type %s' % type(parameters)) - self._parameters = parameters + def _set_field(self, received_value, expected_type, parameters=None): + if parameters is None: + parameters = [] + if received_value is None: + return expected_type(*parameters) + if not isinstance(received_value, expected_type): + raise TypeError('Entity.%s cannot be set to instance of type %s' % + (type(expected_type), type(received_value))) + return received_value def merge(self, other): - self._classes.merge_unique(other._classes) - self._applications.merge_unique(other._applications) - self._parameters.merge(other._parameters) + self._classes.merge_unique(other.classes) + self._applications.merge_unique(other.applications) + self._parameters.merge(other.parameters) + self._exports.merge(other.exports) self._name = other.name self._uri = other.uri - self._environment = other.environment + self._parameters._uri = other.uri + if other.environment != None: + self._environment = other.environment + + def merge_parameters(self, params): + self._parameters.merge(params) + + def interpolate(self, inventory): + self._parameters.interpolate(inventory) + self.interpolate_exports() + + def initialise_interpolation(self): + self._parameters.initialise_interpolation() + self._exports.initialise_interpolation() + + def interpolate_exports(self): + self.initialise_interpolation() + self._exports.interpolate_from_external(self._parameters) - def interpolate(self): - self._parameters.interpolate() + def interpolate_single_export(self, references): + self._exports.interpolate_single_from_external(self._parameters, references) def __eq__(self, other): return isinstance(other, type(self)) \ - and self._applications == other._applications \ - and self._classes == other._classes \ - and self._parameters == other._parameters \ - and self._name == other._name \ - and self._uri == other._uri + and self._applications == other.applications \ + and self._classes == other.classes \ + and 
self._parameters == other.parameters \ + and self._exports == other.exports \ + and self._name == other.name \ + and self._uri == other.uri def __ne__(self, other): return not self.__eq__(other) def __repr__(self): - return "%s(%r, %r, %r, uri=%r, name=%r)" % (self.__class__.__name__, - self.classes, - self.applications, - self.parameters, - self.uri, - self.name) + return "%s(%r, %r, %r, %r, uri=%r, name=%r, environment=%r)" % ( + self.__class__.__name__, self.classes, self.applications, + self.parameters, self.exports, self.uri, self.name, + self.environment) def as_dict(self): return {'classes': self._classes.as_list(), 'applications': self._applications.as_list(), 'parameters': self._parameters.as_dict(), + 'exports': self._exports.as_dict(), 'environment': self._environment } diff --git a/reclass/datatypes/exports.py b/reclass/datatypes/exports.py new file mode 100644 index 00000000..984a15a5 --- /dev/null +++ b/reclass/datatypes/exports.py @@ -0,0 +1,95 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass (http://github.com/madduck/reclass) +# +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import copy + +from six import iteritems, next + +from .parameters import Parameters +from reclass.errors import ResolveError +from reclass.values.value import Value +from reclass.values.valuelist import ValueList +from reclass.utils.dictpath import DictPath + +class Exports(Parameters): + + def __init__(self, mapping, settings, uri): + super(Exports, self).__init__(mapping, settings, uri) + + def delete_key(self, key): + self._base.pop(key, None) + self._unrendered.pop(key, None) + + def overwrite(self, other): + overdict = {'~' + key: value for (key, value) in iteritems(other)} + self.merge(overdict) + + def interpolate_from_external(self, external): + while len(self._unrendered) > 0: + path, v = next(iteritems(self._unrendered)) + value = 
path.get_value(self._base) + if isinstance(value, (Value, ValueList)): + external._interpolate_references(path, value, None) + new = self._interpolate_render_from_external(external._base, path, value) + path.set_value(self._base, new) + del self._unrendered[path] + else: + # references to lists and dicts are only deepcopied when merged + # together so it's possible a value with references in a referenced + # list or dict has already been rendered + del self._unrendered[path] + + def interpolate_single_from_external(self, external, query): + for r in query.get_inv_references(): + self._interpolate_single_path_from_external(r, external, query) + + def _interpolate_single_path_from_external(self, mainpath, external, query): + required = self._get_required_paths(mainpath) + while len(required) > 0: + while len(required) > 0: + path, v = next(iteritems(required)) + value = path.get_value(self._base) + if isinstance(value, (Value, ValueList)): + try: + external._interpolate_references(path, value, None) + new = self._interpolate_render_from_external(external._base, path, value) + path.set_value(self._base, new) + except ResolveError as e: + if query.ignore_failed_render(): + path.delete(self._base) + else: + raise + del required[path] + del self._unrendered[path] + required = self._get_required_paths(mainpath) + + def _get_required_paths(self, mainpath): + paths = {} + path = DictPath(self._settings.delimiter) + for i in mainpath.key_parts(): + path.add_subpath(i) + if path in self._unrendered: + paths[path] = True + for i in self._unrendered: + if mainpath.is_ancestor_of(i) or mainpath == i: + paths[i] = True + return paths + + def _interpolate_render_from_external(self, context, path, value): + try: + new = value.render(context, None) + except ResolveError as e: + e.context = path + raise + if isinstance(new, dict): + new = self._render_simple_dict(new, path) + elif isinstance(new, list): + new = self._render_simple_list(new, path) + return new diff --git 
a/reclass/datatypes/parameters.py b/reclass/datatypes/parameters.py index a39324ea..bab2a281 100644 --- a/reclass/datatypes/parameters.py +++ b/reclass/datatypes/parameters.py @@ -6,12 +6,28 @@ # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import copy +import sys import types -from reclass.defaults import PARAMETER_INTERPOLATION_DELIMITER,\ - PARAMETER_DICT_KEY_OVERRIDE_PREFIX + +from six import iteritems, next + +from collections import namedtuple from reclass.utils.dictpath import DictPath -from reclass.utils.refvalue import RefValue -from reclass.errors import InfiniteRecursionError, UndefinedVariableError +from reclass.utils.parameterdict import ParameterDict +from reclass.utils.parameterlist import ParameterList +from reclass.values.value import Value +from reclass.values.valuelist import ValueList +from reclass.errors import InfiniteRecursionError, ResolveError +from reclass.errors import ResolveErrorList, InterpolationError, ParseError +from reclass.errors import BadReferencesError + class Parameters(object): ''' @@ -36,91 +52,105 @@ class Parameters(object): To support these specialities, this class only exposes very limited functionality and does not try to be a really mapping object. 
''' - DEFAULT_PATH_DELIMITER = PARAMETER_INTERPOLATION_DELIMITER - DICT_KEY_OVERRIDE_PREFIX = PARAMETER_DICT_KEY_OVERRIDE_PREFIX - - def __init__(self, mapping=None, delimiter=None): - if delimiter is None: - delimiter = Parameters.DEFAULT_PATH_DELIMITER - self._delimiter = delimiter - self._base = {} - self._occurrences = {} - if mapping is not None: - # we initialise by merging, otherwise the list of references might - # not be updated - self.merge(mapping, initmerge=True) - delimiter = property(lambda self: self._delimiter) + def __init__(self, mapping, settings, uri, parse_strings=True): + self._settings = settings + self._uri = uri + self._base = ParameterDict(uri=self._uri) + self._unrendered = None + self._inv_queries = [] + self.resolve_errors = ResolveErrorList() + self.needs_all_envs = False + self._parse_strings = parse_strings + if mapping is not None: + # initialise by merging + self.merge(mapping) def __len__(self): return len(self._base) def __repr__(self): - return '%s(%r, %r)' % (self.__class__.__name__, self._base, - self.delimiter) + return '%s(%r)' % (self.__class__.__name__, self._base) def __eq__(self, other): return isinstance(other, type(self)) \ and self._base == other._base \ - and self._delimiter == other._delimiter + and self._settings == other._settings def __ne__(self, other): return not self.__eq__(other) + @property + def has_inv_query(self): + return len(self._inv_queries) > 0 + + def get_inv_queries(self): + return self._inv_queries + def as_dict(self): return self._base.copy() - def _update_scalar(self, cur, new, path): - if isinstance(cur, RefValue) and path in self._occurrences: - # If the current value already holds a RefValue, we better forget - # the occurrence, or else interpolate() will later overwrite - # unconditionally. 
If the new value is a RefValue, the occurrence - # will be added again further on - del self._occurrences[path] - - if self.delimiter is None or not isinstance(new, (types.StringTypes, - RefValue)): - # either there is no delimiter defined (and hence no references - # are being used), or the new value is not a string (and hence - # cannot be turned into a RefValue), and not a RefValue. We can - # shortcut and just return the new scalar - return new - - elif isinstance(new, RefValue): - # the new value is (already) a RefValue, so we need not touch it - # at all - ret = new + def _wrap_value(self, value): + if isinstance(value, (Value, ValueList)): + return value + elif isinstance(value, dict): + return self._wrap_dict(value) + elif isinstance(value, list): + return self._wrap_list(value) + else: + try: + return Value(value, self._settings, self._uri, + parse_string=self._parse_strings) + except InterpolationError as e: + e.context = DictPath(self._settings.delimiter) + raise + def _get_wrapped(self, position, value): + try: + return self._wrap_value(value) + except InterpolationError as e: + e.context.add_ancestor(str(position)) + raise + + def _wrap_list(self, source): + l = ParameterList(uri=self._uri) + for (k, v) in enumerate(source): + l.append(self._get_wrapped(k, v)) + return l + + def _wrap_dict(self, source): + d = ParameterDict(uri=self._uri) + for (k, v) in iteritems(source): + d[k] = self._get_wrapped(k, v) + return d + + def _update_value(self, cur, new): + if isinstance(cur, Value): + values = ValueList(cur, self._settings) + elif isinstance(cur, ValueList): + values = cur else: - # the new value is a string, let's see if it contains references, - # by way of wrapping it in a RefValue and querying the result - ret = RefValue(new, self.delimiter) - if not ret.has_references(): - # do not replace with RefValue instance if there are no - # references, i.e. 
discard the RefValue in ret, just return - # the new value - return new + if isinstance(cur, (ParameterDict, ParameterList)): + uri = cur.uri + else: + uri = self._uri + values = ValueList(Value(cur, self._settings, uri), self._settings) - # So we now have a RefValue. Let's, keep a reference to the instance - # we just created, in a dict indexed by the dictionary path, instead - # of just a list. The keys are required to resolve dependencies during - # interpolation - self._occurrences[path] = ret - return ret - - def _extend_list(self, cur, new, path): - if isinstance(cur, list): - ret = cur - offset = len(cur) + if isinstance(new, Value): + values.append(new) + elif isinstance(new, ValueList): + values.extend(new) else: - ret = [cur] - offset = 1 + if isinstance(new, (ParameterDict, ParameterList)): + uri = new.uri + else: + uri = self._uri + values.append(Value(new, self._settings, uri, + parse_string=self._parse_strings)) - for i in xrange(len(new)): - ret.append(self._merge_recurse(None, new[i], path.new_subpath(offset + i))) - return ret + return values - def _merge_dict(self, cur, new, path, initmerge): + def _merge_dict(self, cur, new): """Merge a dictionary with another dictionary. Iterate over keys in new. 
If this is not an initialization merge and @@ -131,7 +161,6 @@ def _merge_dict(self, cur, new, path, initmerge): Args: cur (dict): Current dictionary new (dict): Dictionary to be merged - path (string): Merging path from recursion initmerge (bool): True if called as part of entity init Returns: @@ -139,66 +168,54 @@ def _merge_dict(self, cur, new, path, initmerge): """ - if isinstance(cur, dict): - ret = cur - else: - # nothing sensible to do - raise TypeError('Cannot merge dict into {0} ' - 'objects'.format(type(cur))) - - if self.delimiter is None: - # a delimiter of None indicates that there is no value - # processing to be done, and since there is no current - # value, we do not need to walk the new dictionary: - ret.update(new) - return ret - - ovrprfx = Parameters.DICT_KEY_OVERRIDE_PREFIX - - for key, newvalue in new.iteritems(): - if key.startswith(ovrprfx) and not initmerge: - ret[key.lstrip(ovrprfx)] = newvalue + for (key, value) in iteritems(new): + # check key for "control" preffixes (~,=,...) + key = str(key) + if key[0] in self._settings.dict_key_prefixes: + newkey = key[1:] + if not isinstance(value, Value): + value = Value(value, self._settings, self._uri, parse_string=self._parse_strings) + if key[0] == self._settings.dict_key_override_prefix: + value.overwrite = True + elif key[0] == self._settings.dict_key_constant_prefix: + value.constant = True + value = self._merge_recurse(cur.get(newkey), value) + key = newkey else: - ret[key] = self._merge_recurse(ret.get(key), newvalue, - path.new_subpath(key), initmerge) - return ret + value = self._merge_recurse(cur.get(key), value) + cur[key] = value + cur.uri = new.uri + return cur - def _merge_recurse(self, cur, new, path=None, initmerge=False): + def _merge_recurse(self, cur, new): """Merge a parameter with another parameter. - Iterate over keys in new. Call _merge_dict, _extend_list, or - _update_scalar depending on type. Pass along whether this is an - initialization merge. 
+ Iterate over keys in new. Call _merge_dict, _update_value + depending on type. Args: - cur (dict): Current dictionary - new (dict): Dictionary to be merged - path (string): Merging path from recursion - initmerge (bool): True if called as part of entity init, defaults - to False + cur: Current parameter + new: Parameter to be merged Returns: - dict: a merged dictionary + merged parameter (Value or ValueList) """ - if path is None: - path = DictPath(self.delimiter) - if isinstance(new, dict): if cur is None: - cur = {} - return self._merge_dict(cur, new, path, initmerge) - - elif isinstance(new, list): - if cur is None: - cur = [] - return self._extend_list(cur, new, path) - + cur = ParameterDict(uri=self._uri) + if isinstance(cur, dict): + return self._merge_dict(cur, new) + else: + return self._update_value(cur, new) else: - return self._update_scalar(cur, new, path) + if cur is None: + return new + else: + return self._update_value(cur, new) - def merge(self, other, initmerge=False): + def merge(self, other): """Merge function (public edition). 
Call _merge_recurse on self with either another Parameter object or a @@ -212,65 +229,150 @@ def merge(self, other, initmerge=False): """ + self._unrendered = None if isinstance(other, dict): - self._base = self._merge_recurse(self._base, other, - None, initmerge) - + wrapped = self._wrap_dict(other) elif isinstance(other, self.__class__): - self._base = self._merge_recurse(self._base, other._base, - None, initmerge) - + wrapped = other._wrap_dict(other._base) else: raise TypeError('Cannot merge %s objects into %s' % (type(other), self.__class__.__name__)) - - def has_unresolved_refs(self): - return len(self._occurrences) > 0 - - def interpolate(self): - while self.has_unresolved_refs(): + self._base = self._merge_recurse(self._base, wrapped) + + def _render_simple_container(self, container, key, value, path): + if isinstance(value, ValueList): + if value.is_complex: + p = path.new_subpath(key) + self._unrendered[p] = True + container[key] = value + if value.has_inv_query: + self._inv_queries.append((p, value)) + if value.needs_all_envs: + self.needs_all_envs = True + return + else: + value = value.merge() + if isinstance(value, Value) and value.is_container(): + value = value.contents + if isinstance(value, dict): + container[key] = self._render_simple_dict(value, path.new_subpath(key)) + elif isinstance(value, list): + container[key] = self._render_simple_list(value, path.new_subpath(key)) + elif isinstance(value, Value): + if value.is_complex: + p = path.new_subpath(key) + self._unrendered[p] = True + container[key] = value + if value.has_inv_query: + self._inv_queries.append((p, value)) + if value.needs_all_envs: + self.needs_all_envs = True + else: + container[key] = value.render(None, None) + else: + container[key] = value + + def _render_simple_dict(self, dictionary, path): + new_dict = {} + for (key, value) in iteritems(dictionary): + self._render_simple_container(new_dict, key, value, path) + return new_dict + + def _render_simple_list(self, item_list, 
path): + new_list = [ None ] * len(item_list) + for n, value in enumerate(item_list): + self._render_simple_container(new_list, n, value, path) + return new_list + + def interpolate(self, inventory=None): + self._initialise_interpolate() + while len(self._unrendered) > 0: # we could use a view here, but this is simple enough: # _interpolate_inner removes references from the refs hash after # processing them, so we cannot just iterate the dict - path, refvalue = self._occurrences.iteritems().next() - self._interpolate_inner(path, refvalue) - - def _interpolate_inner(self, path, refvalue): - self._occurrences[path] = True # mark as seen - for ref in refvalue.get_references(): - path_from_ref = DictPath(self.delimiter, ref) - try: - refvalue_inner = self._occurrences[path_from_ref] - - # If there is no reference, then this will throw a KeyError, - # look further down where this is caught and execution passed - # to the next iteration of the loop - # - # If we get here, then the ref references another parameter, - # requiring us to recurse, dereferencing first those refs that - # are most used and are thus at the leaves of the dependency - # tree. - - if refvalue_inner is True: - # every call to _interpolate_inner replaces the value of - # the saved occurrences of a reference with True. - # Therefore, if we encounter True instead of a refvalue, - # it means that we have already processed it and are now - # faced with a cyclical reference. 
- raise InfiniteRecursionError(path, ref) - self._interpolate_inner(path_from_ref, refvalue_inner) - - except KeyError as e: - # not actually an error, but we are done resolving all - # dependencies of the current ref, so move on - continue - + path, v = next(iteritems(self._unrendered)) + self._interpolate_inner(path, inventory) + if self.resolve_errors.have_errors(): + raise self.resolve_errors + + def initialise_interpolation(self): + self._unrendered = None + self._initialise_interpolate() + + def _initialise_interpolate(self): + if self._unrendered is None: + self._unrendered = {} + self._inv_queries = [] + self.needs_all_envs = False + self.resolve_errors = ResolveErrorList() + self._base = self._render_simple_dict(self._base, + DictPath(self._settings.delimiter)) + + def _interpolate_inner(self, path, inventory): + value = path.get_value(self._base) + if not isinstance(value, (Value, ValueList)): + # references to lists and dicts are only deepcopied when merged + # together so it's possible a value with references in a referenced + # list or dict has already been visited by _interpolate_inner + del self._unrendered[path] + return + self._unrendered[path] = False + self._interpolate_references(path, value, inventory) + new = self._interpolate_render_value(path, value, inventory) + path.set_value(self._base, new) + del self._unrendered[path] + + def _interpolate_render_value(self, path, value, inventory): try: - new = refvalue.render(self._base) - path.set_value(self._base, new) - - # finally, remove the reference from the occurrences cache - del self._occurrences[path] - except UndefinedVariableError as e: - raise UndefinedVariableError(e.var, path) + new = value.render(self._base, inventory) + except ResolveError as e: + e.context = path + if self._settings.group_errors: + self.resolve_errors.add(e) + new = None + else: + raise + except InterpolationError as e: + e.context = path + raise + if isinstance(new, dict): + new = self._render_simple_dict(new, path) 
+ elif isinstance(new, list): + new = self._render_simple_list(new, path) + return new + + def _interpolate_references(self, path, value, inventory): + all_refs = False + while not all_refs: + for ref in value.get_references(): + path_from_ref = DictPath(self._settings.delimiter, ref) + + if path_from_ref in self._unrendered: + if self._unrendered[path_from_ref] is False: + # every call to _interpolate_inner replaces the value of + # self._unrendered[path] with False + # Therefore, if we encounter False instead of True, + # it means that we have already processed it and are now + # faced with a cyclical reference. + raise InfiniteRecursionError(path, ref, value.uri) + else: + self._interpolate_inner(path_from_ref, inventory) + else: + # ensure ancestor keys are already dereferenced + ancestor = DictPath(self._settings.delimiter) + for k in path_from_ref.key_parts(): + ancestor = ancestor.new_subpath(k) + if ancestor in self._unrendered: + self._interpolate_inner(ancestor, inventory) + if value.allRefs: + all_refs = True + else: + # not all references in the value could be calculated previously so + # try recalculating references with current context and recursively + # call _interpolate_inner if the number of references has increased + # Otherwise raise an error + old = len(value.get_references()) + value.assembleRefs(self._base) + if old == len(value.get_references()): + raise BadReferencesError(value.get_references(), str(path), value.uri) diff --git a/reclass/datatypes/tests/__init__.py b/reclass/datatypes/tests/__init__.py index e69de29b..9aaaf25a 100644 --- a/reclass/datatypes/tests/__init__.py +++ b/reclass/datatypes/tests/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals diff --git a/reclass/datatypes/tests/test_applications.py b/reclass/datatypes/tests/test_applications.py index 307a430a..5c896f0b 100644 --- 
a/reclass/datatypes/tests/test_applications.py +++ b/reclass/datatypes/tests/test_applications.py @@ -6,8 +6,14 @@ # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + from reclass.datatypes import Applications, Classes import unittest + try: import unittest.mock as mock except ImportError: @@ -63,7 +69,7 @@ def test_repr_negations_interspersed(self): l = ['a', '~b', 'a', '~d'] a = Applications(l) is_negation = lambda x: x.startswith(a.negation_prefix) - GOAL = filter(lambda x: not is_negation(x), set(l)) + filter(is_negation, l) + GOAL = list(filter(lambda x: not is_negation(x), set(l))) + list(filter(is_negation, l)) self.assertEqual('%r' % a, "%s(%r, '~')" % (a.__class__.__name__, GOAL)) if __name__ == '__main__': diff --git a/reclass/datatypes/tests/test_classes.py b/reclass/datatypes/tests/test_classes.py index 33d179fc..9b9e419c 100644 --- a/reclass/datatypes/tests/test_classes.py +++ b/reclass/datatypes/tests/test_classes.py @@ -6,9 +6,15 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + from reclass.datatypes import Classes from reclass.datatypes.classes import INVALID_CHARACTERS_FOR_CLASSNAMES import unittest + try: import unittest.mock as mock except ImportError: @@ -72,8 +78,9 @@ def test_append_if_new_nonstring(self): def test_append_invalid_characters(self): c = Classes() invalid_name = ' '.join(('foo', 'bar')) - with self.assertRaises(InvalidClassnameError): + with self.assertRaises(InvalidClassnameError) as e: c.append_if_new(invalid_name) + self.assertEqual(e.exception.message, "Invalid character ' ' in class name 'foo bar'.") def test_merge_unique(self): c = Classes(TESTLIST1) diff --git a/reclass/datatypes/tests/test_entity.py b/reclass/datatypes/tests/test_entity.py index 17ec9e86..f18f3fcc 100644 --- a/reclass/datatypes/tests/test_entity.py +++ b/reclass/datatypes/tests/test_entity.py @@ -6,91 +6,104 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # -from reclass.datatypes import Entity, Classes, Parameters, Applications +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from reclass.settings import Settings +from reclass.datatypes import Entity, Classes, Parameters, Applications, Exports +from reclass.errors import ResolveError import unittest + try: import unittest.mock as mock except ImportError: import mock +SETTINGS = Settings() + @mock.patch.multiple('reclass.datatypes', autospec=True, Classes=mock.DEFAULT, - Applications=mock.DEFAULT, - Parameters=mock.DEFAULT) + Applications=mock.DEFAULT, Parameters=mock.DEFAULT, + Exports=mock.DEFAULT) class TestEntity(unittest.TestCase): - def _make_instances(self, Classes, Applications, Parameters): - return Classes(), Applications(), Parameters() + def _make_instances(self, Classes, Applications, Parameters, Exports): + return Classes(), Applications(), Parameters({}, SETTINGS, ""), Exports({}, SETTINGS, "") def test_constructor_default(self, **mocks): # Actually test the real objects by calling the default constructor, # all other tests shall pass instances to the constructor - e = Entity() + e = Entity(SETTINGS) self.assertEqual(e.name, '') self.assertEqual(e.uri, '') self.assertIsInstance(e.classes, Classes) self.assertIsInstance(e.applications, Applications) self.assertIsInstance(e.parameters, Parameters) + self.assertIsInstance(e.exports, Exports) def test_constructor_empty(self, **types): instances = self._make_instances(**types) - e = Entity(*instances) + e = Entity(SETTINGS, *instances) self.assertEqual(e.name, '') self.assertEqual(e.uri, '') - cl, al, pl = [getattr(i, '__len__') for i in instances] + cl, al, pl, ex = [getattr(i, '__len__') for i in instances] self.assertEqual(len(e.classes), cl.return_value) cl.assert_called_once_with() self.assertEqual(len(e.applications), al.return_value) 
al.assert_called_once_with() self.assertEqual(len(e.parameters), pl.return_value) pl.assert_called_once_with() + self.assertEqual(len(e.exports), pl.return_value) + ex.assert_called_once_with() def test_constructor_empty_named(self, **types): name = 'empty' - e = Entity(*self._make_instances(**types), name=name) + e = Entity(SETTINGS, *self._make_instances(**types), name=name) self.assertEqual(e.name, name) def test_constructor_empty_uri(self, **types): uri = 'test://uri' - e = Entity(*self._make_instances(**types), uri=uri) + e = Entity(SETTINGS, *self._make_instances(**types), uri=uri) self.assertEqual(e.uri, uri) def test_constructor_empty_env(self, **types): env = 'not base' - e = Entity(*self._make_instances(**types), environment=env) + e = Entity(SETTINGS, *self._make_instances(**types), environment=env) self.assertEqual(e.environment, env) def test_equal_empty(self, **types): instances = self._make_instances(**types) - self.assertEqual(Entity(*instances), Entity(*instances)) + self.assertEqual(Entity(SETTINGS, *instances), Entity(SETTINGS, *instances)) for i in instances: i.__eq__.assert_called_once_with(i) def test_equal_empty_named(self, **types): instances = self._make_instances(**types) - self.assertEqual(Entity(*instances), Entity(*instances)) + self.assertEqual(Entity(SETTINGS, *instances), Entity(SETTINGS, *instances)) name = 'empty' - self.assertEqual(Entity(*instances, name=name), - Entity(*instances, name=name)) + self.assertEqual(Entity(SETTINGS, *instances, name=name), + Entity(SETTINGS, *instances, name=name)) def test_unequal_empty_uri(self, **types): instances = self._make_instances(**types) uri = 'test://uri' - self.assertNotEqual(Entity(*instances, uri=uri), - Entity(*instances, uri=uri[::-1])) + self.assertNotEqual(Entity(SETTINGS, *instances, uri=uri), + Entity(SETTINGS, *instances, uri=uri[::-1])) for i in instances: i.__eq__.assert_called_once_with(i) def test_unequal_empty_named(self, **types): instances = self._make_instances(**types) 
name = 'empty' - self.assertNotEqual(Entity(*instances, name=name), - Entity(*instances, name=name[::-1])) + self.assertNotEqual(Entity(SETTINGS, *instances, name=name), + Entity(SETTINGS, *instances, name=name[::-1])) for i in instances: i.__eq__.assert_called_once_with(i) def test_unequal_types(self, **types): instances = self._make_instances(**types) - self.assertNotEqual(Entity(*instances, name='empty'), + self.assertNotEqual(Entity(SETTINGS, *instances, name='empty'), None) for i in instances: self.assertEqual(i.__eq__.call_count, 0) @@ -98,7 +111,7 @@ def test_unequal_types(self, **types): def _test_constructor_wrong_types(self, which_replace, **types): instances = self._make_instances(**types) instances[which_replace] = 'Invalid type' - e = Entity(*instances) + e = Entity(SETTINGS, *instances) def test_constructor_wrong_type_classes(self, **types): self.assertRaises(TypeError, self._test_constructor_wrong_types, 0) @@ -111,7 +124,7 @@ def test_constructor_wrong_type_parameters(self, **types): def test_merge(self, **types): instances = self._make_instances(**types) - e = Entity(*instances) + e = Entity(SETTINGS, *instances) e.merge(e) for i, fn in zip(instances, ('merge_unique', 'merge_unique', 'merge')): getattr(i, fn).assert_called_once_with(i) @@ -119,38 +132,167 @@ def test_merge(self, **types): def test_merge_newname(self, **types): instances = self._make_instances(**types) newname = 'newname' - e1 = Entity(*instances, name='oldname') - e2 = Entity(*instances, name=newname) + e1 = Entity(SETTINGS, *instances, name='oldname') + e2 = Entity(SETTINGS, *instances, name=newname) e1.merge(e2) self.assertEqual(e1.name, newname) def test_merge_newuri(self, **types): instances = self._make_instances(**types) newuri = 'test://uri2' - e1 = Entity(*instances, uri='test://uri1') - e2 = Entity(*instances, uri=newuri) + e1 = Entity(SETTINGS, *instances, uri='test://uri1') + e2 = Entity(SETTINGS, *instances, uri=newuri) e1.merge(e2) self.assertEqual(e1.uri, newuri) def 
test_merge_newenv(self, **types): instances = self._make_instances(**types) newenv = 'new env' - e1 = Entity(*instances, environment='env') - e2 = Entity(*instances, environment=newenv) + e1 = Entity(SETTINGS, *instances, environment='env') + e2 = Entity(SETTINGS, *instances, environment=newenv) e1.merge(e2) self.assertEqual(e1.environment, newenv) def test_as_dict(self, **types): instances = self._make_instances(**types) - entity = Entity(*instances, name='test', environment='test') + entity = Entity(SETTINGS, *instances, name='test', environment='test') comp = {} comp['classes'] = instances[0].as_list() comp['applications'] = instances[1].as_list() comp['parameters'] = instances[2].as_dict() + comp['exports'] = instances[3].as_dict() comp['environment'] = 'test' d = entity.as_dict() self.assertDictEqual(d, comp) +class TestEntityNoMock(unittest.TestCase): + + def test_interpolate_list_types(self): + node1_exports = Exports({'exps': [ '${one}' ] }, SETTINGS, 'first') + node1_parameters = Parameters({'alpha': [ '${two}', '${three}' ], 'one': 1, 'two': 2, 'three': 3 }, SETTINGS, 'first') + node1_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node1_parameters, exports=node1_exports) + node2_exports = Exports({'exps': '${alpha}' }, SETTINGS, 'second') + node2_parameters = Parameters({}, SETTINGS, 'second') + node2_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node2_parameters, exports=node2_exports) + r = {'exps': [ 1, 2, 3 ]} + node1_entity.merge(node2_entity) + node1_entity.interpolate(None) + self.assertIs(type(node1_entity.exports.as_dict()['exps']), list) + self.assertDictEqual(node1_entity.exports.as_dict(), r) + + def test_exports_with_refs(self): + inventory = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}} + node3_exports = Exports({'a': '${a}', 'b': '${b}'}, SETTINGS, '') + node3_parameters = Parameters({'name': 'node3', 'a': '${c}', 'b': 5}, SETTINGS, '') + node3_parameters.merge({'c': 3}) + node3_entity 
= Entity(SETTINGS, classes=None, applications=None, parameters=node3_parameters, exports=node3_exports) + node3_entity.interpolate_exports() + inventory['node3'] = node3_entity.exports.as_dict() + r = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}, 'node3': {'a': 3, 'b': 5}} + self.assertDictEqual(inventory, r) + + def test_reference_to_an_export(self): + inventory = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}} + node3_exports = Exports({'a': '${a}', 'b': '${b}'}, SETTINGS, '') + node3_parameters = Parameters({'name': 'node3', 'ref': '${exp}', 'a': '${c}', 'b': 5}, SETTINGS, '') + node3_parameters.merge({'c': 3, 'exp': '$[ exports:a ]'}) + node3_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node3_parameters, exports=node3_exports) + node3_entity.interpolate_exports() + inventory['node3'] = node3_entity.exports.as_dict() + node3_entity.interpolate(inventory) + res_inv = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}, 'node3': {'a': 3, 'b': 5}} + res_params = {'a': 3, 'c': 3, 'b': 5, 'name': 'node3', 'exp': {'node1': 1, 'node3': 3, 'node2': 3}, 'ref': {'node1': 1, 'node3': 3, 'node2': 3}} + self.assertDictEqual(node3_parameters.as_dict(), res_params) + self.assertDictEqual(inventory, res_inv) + + def test_exports_multiple_nodes(self): + node1_exports = Exports({'a': '${a}'}, SETTINGS, '') + node1_parameters = Parameters({'name': 'node1', 'a': { 'test': '${b}' }, 'b': 1, 'exp': '$[ exports:a ]'}, SETTINGS, '') + node1_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node1_parameters, exports=node1_exports) + node2_exports = Exports({'a': '${a}'}, SETTINGS, '') + node2_parameters = Parameters({'name': 'node2', 'a': { 'test': '${b}' }, 'b': 2 }, SETTINGS, '') + node2_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node2_parameters, exports=node2_exports) + node1_entity.initialise_interpolation() + node2_entity.initialise_interpolation() + queries = 
node1_entity.parameters.get_inv_queries() + for p, q in queries: + node1_entity.interpolate_single_export(q) + node2_entity.interpolate_single_export(q) + res_inv = {'node1': {'a': {'test': 1}}, 'node2': {'a': {'test': 2}}} + res_params = {'a': {'test': 1}, 'b': 1, 'name': 'node1', 'exp': {'node1': {'test': 1}, 'node2': {'test': 2}}} + inventory = {} + inventory['node1'] = node1_entity.exports.as_dict() + inventory['node2'] = node2_entity.exports.as_dict() + node1_entity.interpolate(inventory) + self.assertDictEqual(node1_parameters.as_dict(), res_params) + self.assertDictEqual(inventory, res_inv) + + def test_exports_with_ancestor_references(self): + inventory = {'node1': {'alpha' : {'beta': {'a': 1, 'b': 2}}}, 'node2': {'alpha' : {'beta': {'a': 3, 'b': 4}}}} + node3_exports = Exports({'alpha': '${alpha}'}, SETTINGS, '') + node3_parameters = Parameters({'name': 'node3', 'alpha': {'beta' : {'a': 5, 'b': 6}}, 'exp': '$[ exports:alpha:beta ]'}, SETTINGS, '') + node3_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node3_parameters, exports=node3_exports) + res_params = {'exp': {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}, 'node3': {'a': 5, 'b': 6}}, 'name': 'node3', 'alpha': {'beta': {'a': 5, 'b': 6}}} + res_inv = {'node1': {'alpha' : {'beta': {'a': 1, 'b': 2}}}, 'node2': {'alpha' : {'beta': {'a': 3, 'b': 4}}}, 'node3': {'alpha' : {'beta': {'a': 5, 'b': 6}}}} + node3_entity.initialise_interpolation() + queries = node3_entity.parameters.get_inv_queries() + for p, q in queries: + node3_entity.interpolate_single_export(q) + inventory['node3'] = node3_entity.exports.as_dict() + node3_entity.interpolate(inventory) + self.assertDictEqual(node3_parameters.as_dict(), res_params) + self.assertDictEqual(inventory, res_inv) + + def test_exports_with_nested_references(self): + inventory = {'node1': {'alpha': {'a': 1, 'b': 2}}, 'node2': {'alpha': {'a': 3, 'b': 4}}} + node3_exports = Exports({'alpha': '${alpha}'}, SETTINGS, '') + node3_parameters = 
Parameters({'name': 'node3', 'alpha': {'a': '${one}', 'b': '${two}'}, 'beta': '$[ exports:alpha ]', 'one': '111', 'two': '${three}', 'three': '123'}, SETTINGS, '') + node3_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node3_parameters, exports=node3_exports) + res_params = {'beta': {'node1': {'a': 1, 'b': 2}, 'node3': {'a': '111', 'b': '123'}, 'node2': {'a': 3, 'b': 4}}, 'name': 'node3', 'alpha': {'a': '111', 'b': '123'}, 'three': '123', 'two': '123', 'one': '111'} + res_inv = {'node1': {'alpha': {'a': 1, 'b': 2}}, 'node2': {'alpha': {'a': 3, 'b': 4}}, 'node3': {'alpha': {'a': '111', 'b': '123'}}} + node3_entity.interpolate_exports() + inventory['node3'] = node3_entity.exports.as_dict() + node3_entity.interpolate(inventory) + self.assertDictEqual(node3_parameters.as_dict(), res_params) + self.assertDictEqual(inventory, res_inv) + + def test_exports_failed_render(self): + node1_exports = Exports({'a': '${a}'}, SETTINGS, '') + node1_parameters = Parameters({'name': 'node1', 'a': 1, 'exp': '$[ exports:a ]'}, SETTINGS, '') + node1_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node1_parameters, exports=node1_exports) + node2_exports = Exports({'a': '${b}'}, SETTINGS, '') + node2_parameters = Parameters({'name': 'node2', 'a': 2}, SETTINGS, '') + node2_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node2_parameters, exports=node2_exports) + node1_entity.initialise_interpolation() + node2_entity.initialise_interpolation() + queries = node1_entity.parameters.get_inv_queries() + with self.assertRaises(ResolveError) as e: + for p, q in queries: + node1_entity.interpolate_single_export(q) + node2_entity.interpolate_single_export(q) + self.assertEqual(e.exception.message, "-> \n Cannot resolve ${b}, at a") + + def test_exports_failed_render_ignore(self): + node1_exports = Exports({'a': '${a}'}, SETTINGS, '') + node1_parameters = Parameters({'name': 'node1', 'a': 1, 'exp': '$[ +IgnoreErrors exports:a ]'}, 
SETTINGS, '') + node1_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node1_parameters, exports=node1_exports) + node2_exports = Exports({'a': '${b}'}, SETTINGS, '') + node2_parameters = Parameters({'name': 'node1', 'a': 2}, SETTINGS, '') + node2_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node2_parameters, exports=node2_exports) + node1_entity.initialise_interpolation() + node2_entity.initialise_interpolation() + queries = node1_entity.parameters.get_inv_queries() + for p, q in queries: + node1_entity.interpolate_single_export(q) + node2_entity.interpolate_single_export(q) + res_inv = {'node1': {'a': 1}, 'node2': {}} + res_params = { 'a': 1, 'name': 'node1', 'exp': {'node1': 1} } + inventory = {} + inventory['node1'] = node1_entity.exports.as_dict() + inventory['node2'] = node2_entity.exports.as_dict() + node1_entity.interpolate(inventory) + self.assertDictEqual(node1_parameters.as_dict(), res_params) + self.assertDictEqual(inventory, res_inv) if __name__ == '__main__': unittest.main() diff --git a/reclass/datatypes/tests/test_exports.py b/reclass/datatypes/tests/test_exports.py new file mode 100644 index 00000000..16a45cb4 --- /dev/null +++ b/reclass/datatypes/tests/test_exports.py @@ -0,0 +1,133 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass (http://github.com/madduck/reclass) +# +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from reclass.utils.parameterdict import ParameterDict +from reclass.utils.parameterlist import ParameterList +from reclass.settings import Settings +from reclass.datatypes import Exports, Parameters +from reclass.errors import ParseError +import unittest + +SETTINGS = Settings() + +class TestInvQuery(unittest.TestCase): + + def test_overwrite_method(self): + e = Exports({'alpha': { 'one': 1, 'two': 2}}, SETTINGS, '') + d = {'alpha': { 'three': 3, 'four': 4}} + 
e.overwrite(d) + e.interpolate() + self.assertEqual(e.as_dict(), d) + + def test_interpolate_types(self): + e = Exports({'alpha': { 'one': 1, 'two': 2}, 'beta': [ 1, 2 ]}, SETTINGS, '') + r = {'alpha': { 'one': 1, 'two': 2}, 'beta': [ 1, 2 ]} + self.assertIs(type(e.as_dict()['alpha']), ParameterDict) + self.assertIs(type(e.as_dict()['beta']), ParameterList) + e.interpolate() + self.assertIs(type(e.as_dict()['alpha']), dict) + self.assertIs(type(e.as_dict()['beta']), list) + self.assertEqual(e.as_dict(), r) + + def test_malformed_invquery(self): + with self.assertRaises(ParseError): + p = Parameters({'exp': '$[ exports:a exports:b == self:test_value ]'}, SETTINGS, '') + with self.assertRaises(ParseError): + p = Parameters({'exp': '$[ exports:a if exports:b self:test_value ]'}, SETTINGS, '') + with self.assertRaises(ParseError): + p = Parameters({'exp': '$[ exports:a if exports:b == ]'}, SETTINGS, '') + with self.assertRaises(ParseError): + p = Parameters({'exp': '$[ exports:a if exports:b == self:test_value and exports:c = self:test_value2 ]'}, SETTINGS, '') + with self.assertRaises(ParseError): + p = Parameters({'exp': '$[ exports:a if exports:b == self:test_value or exports:c == ]'}, SETTINGS, '') + with self.assertRaises(ParseError): + p = Parameters({'exp': '$[ exports:a if exports:b == self:test_value anddd exports:c == self:test_value2 ]'}, SETTINGS, '') + + def test_value_expr_invquery(self): + e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}} + p = Parameters({'exp': '$[ exports:a ]'}, SETTINGS, '') + r = {'exp': {'node1': 1, 'node2': 3}} + p.interpolate(e) + self.assertEqual(p.as_dict(), r) + + def test_if_expr_invquery(self): + e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}} + p = Parameters({'exp': '$[ exports:a if exports:b == 4 ]'}, SETTINGS, '') + r = {'exp': {'node2': 3}} + p.interpolate(e) + self.assertEqual(p.as_dict(), r) + + def test_if_expr_invquery_with_refs(self): + e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}} 
+ p = Parameters({'exp': '$[ exports:a if exports:b == self:test_value ]', 'test_value': 2}, SETTINGS, '') + r = {'exp': {'node1': 1}, 'test_value': 2} + p.interpolate(e) + self.assertEqual(p.as_dict(), r) + + def test_list_if_expr_invquery(self): + e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 3}, 'node3': {'a': 3, 'b': 2}} + p = Parameters({'exp': '$[ if exports:b == 2 ]'}, SETTINGS, '') + r1 = {'exp': ['node1', 'node3']} + r2 = {'exp': ['node3', 'node1']} + p.interpolate(e) + self.assertIn(p.as_dict(), [ r1, r2 ]) + + def test_if_expr_invquery_wth_and(self): + e = {'node1': {'a': 1, 'b': 4, 'c': False}, 'node2': {'a': 3, 'b': 4, 'c': True}} + p = Parameters({'exp': '$[ exports:a if exports:b == 4 and exports:c == True ]'}, SETTINGS, '') + r = {'exp': {'node2': 3}} + p.interpolate(e) + self.assertEqual(p.as_dict(), r) + + def test_if_expr_invquery_wth_or(self): + e = {'node1': {'a': 1, 'b': 4}, 'node2': {'a': 3, 'b': 3}} + p = Parameters({'exp': '$[ exports:a if exports:b == 4 or exports:b == 3 ]'}, SETTINGS, '') + r = {'exp': {'node1': 1, 'node2': 3}} + p.interpolate(e) + self.assertEqual(p.as_dict(), r) + + def test_list_if_expr_invquery_with_and(self): + e = {'node1': {'a': 1, 'b': 2, 'c': 'green'}, 'node2': {'a': 3, 'b': 3}, 'node3': {'a': 3, 'b': 2, 'c': 'red'}} + p = Parameters({'exp': '$[ if exports:b == 2 and exports:c == green ]'}, SETTINGS, '') + r = {'exp': ['node1']} + p.interpolate(e) + self.assertEqual(p.as_dict(), r) + + def test_list_if_expr_invquery_with_and_missing(self): + inventory = {'node1': {'a': 1, 'b': 2, 'c': 'green'}, + 'node2': {'a': 3, 'b': 3}, + 'node3': {'a': 3, 'b': 2}} + mapping = {'exp': '$[ if exports:b == 2 and exports:c == green ]'} + expected = {'exp': ['node1']} + + pars = Parameters(mapping, SETTINGS, '') + pars.interpolate(inventory) + + self.assertEqual(pars.as_dict(), expected) + + def test_list_if_expr_invquery_with_and(self): + e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 3}, 'node3': {'a': 3, 'b': 
4}} + p = Parameters({'exp': '$[ if exports:b == 2 or exports:b == 4 ]'}, SETTINGS, '') + r1 = {'exp': ['node1', 'node3']} + r2 = {'exp': ['node3', 'node1']} + p.interpolate(e) + self.assertIn(p.as_dict(), [ r1, r2 ]) + + def test_merging_inv_queries(self): + e = {'node1': {'a': 1}, 'node2': {'a': 1}, 'node3': {'a': 2}} + p1 = Parameters({'exp': '$[ if exports:a == 1 ]'}, SETTINGS, '') + p2 = Parameters({'exp': '$[ if exports:a == 2 ]'}, SETTINGS, '') + r = { 'exp': [ 'node1', 'node2', 'node3' ] } + p1.merge(p2) + p1.interpolate(e) + self.assertEqual(p1.as_dict(), r) + +if __name__ == '__main__': + unittest.main() diff --git a/reclass/datatypes/tests/test_parameters.py b/reclass/datatypes/tests/test_parameters.py index 51006391..80fd8de1 100644 --- a/reclass/datatypes/tests/test_parameters.py +++ b/reclass/datatypes/tests/test_parameters.py @@ -6,25 +6,54 @@ # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import copy + +from six import iteritems + +from reclass.settings import Settings from reclass.datatypes import Parameters -from reclass.defaults import PARAMETER_INTERPOLATION_SENTINELS -from reclass.errors import InfiniteRecursionError +from reclass.utils.parameterdict import ParameterDict +from reclass.utils.parameterlist import ParameterList +from reclass.values.value import Value +from reclass.values.valuelist import ValueList +from reclass.values.scaitem import ScaItem +from reclass.errors import ChangedConstantError, InfiniteRecursionError, InterpolationError, ResolveError, ResolveErrorList, TypeMergeError import unittest + try: import unittest.mock as mock except ImportError: import mock SIMPLE = {'one': 1, 'two': 2, 'three': 3} +SETTINGS = Settings() + +class MockDevice(object): + def __init__(self): + self._text = '' + + def write(self, s): + self._text 
+= s + return + + def text(self): + return self._text class TestParameters(unittest.TestCase): - def _construct_mocked_params(self, iterable=None, delimiter=None): - p = Parameters(iterable, delimiter) + def _construct_mocked_params(self, iterable=None, settings=SETTINGS): + p = Parameters(iterable, settings, '') self._base = base = p._base - p._base = mock.MagicMock(spec_set=dict, wraps=base) + p._base = mock.MagicMock(spec_set=ParameterDict, wraps=base) p._base.__repr__ = mock.MagicMock(autospec=dict.__repr__, return_value=repr(base)) + p._base.__getitem__.side_effect = base.__getitem__ + p._base.__setitem__.side_effect = base.__setitem__ return p, p._base def test_len_empty(self): @@ -44,22 +73,13 @@ def test_constructor(self): def test_repr_empty(self): p, b = self._construct_mocked_params() b.__repr__.return_value = repr({}) - self.assertEqual('%r' % p, '%s(%r, %r)' % (p.__class__.__name__, {}, - Parameters.DEFAULT_PATH_DELIMITER)) + self.assertEqual('%r' % p, '%s(%r)' % (p.__class__.__name__, {})) b.__repr__.assert_called_once_with() def test_repr(self): p, b = self._construct_mocked_params(SIMPLE) b.__repr__.return_value = repr(SIMPLE) - self.assertEqual('%r' % p, '%s(%r, %r)' % (p.__class__.__name__, SIMPLE, - Parameters.DEFAULT_PATH_DELIMITER)) - b.__repr__.assert_called_once_with() - - def test_repr_delimiter(self): - delim = '%' - p, b = self._construct_mocked_params(SIMPLE, delim) - b.__repr__.return_value = repr(SIMPLE) - self.assertEqual('%r' % p, '%s(%r, %r)' % (p.__class__.__name__, SIMPLE, delim)) + self.assertEqual('%r' % p, '%s(%r)' % (p.__class__.__name__, SIMPLE)) b.__repr__.assert_called_once_with() def test_equal_empty(self): @@ -71,8 +91,7 @@ def test_equal_empty(self): def test_equal_default_delimiter(self): p1, b1 = self._construct_mocked_params(SIMPLE) - p2, b2 = self._construct_mocked_params(SIMPLE, - Parameters.DEFAULT_PATH_DELIMITER) + p2, b2 = self._construct_mocked_params(SIMPLE, SETTINGS) b1.__eq__.return_value = True 
self.assertEqual(p1, p2) b1.__eq__.assert_called_once_with(b2) @@ -92,8 +111,10 @@ def test_unequal_content(self): b1.__eq__.assert_called_once_with(b2) def test_unequal_delimiter(self): - p1, b1 = self._construct_mocked_params(delimiter=':') - p2, b2 = self._construct_mocked_params(delimiter='%') + settings1 = Settings({'delimiter': ':'}) + settings2 = Settings({'delimiter': '%'}) + p1, b1 = self._construct_mocked_params(settings=settings1) + p2, b2 = self._construct_mocked_params(settings=settings2) b1.__eq__.return_value = False self.assertNotEqual(p1, p2) b1.__eq__.assert_called_once_with(b2) @@ -104,16 +125,21 @@ def test_unequal_types(self): self.assertEqual(b1.__eq__.call_count, 0) def test_construct_wrong_type(self): - with self.assertRaises(TypeError): - self._construct_mocked_params('wrong type') + with self.assertRaises(TypeError) as e: + self._construct_mocked_params(str('wrong type')) + self.assertIn(str(e.exception), [ "Cannot merge objects into Parameters", # python 2 + "Cannot merge objects into Parameters" ]) # python 3 def test_merge_wrong_type(self): p, b = self._construct_mocked_params() - with self.assertRaises(TypeError): - p.merge('wrong type') + with self.assertRaises(TypeError) as e: + p.merge(str('wrong type')) + self.assertIn(str(e.exception), [ "Cannot merge objects into Parameters", # python 2 + "Cannot merge objects into Parameters"]) # python 3 def test_get_dict(self): p, b = self._construct_mocked_params(SIMPLE) + p.initialise_interpolation() self.assertDictEqual(p.as_dict(), SIMPLE) def test_merge_scalars(self): @@ -121,33 +147,33 @@ def test_merge_scalars(self): mergee = {'five':5,'four':4,'None':None,'tuple':(1,2,3)} p2, b2 = self._construct_mocked_params(mergee) p1.merge(p2) - for key, value in mergee.iteritems(): - # check that each key, value in mergee resulted in a get call and - # a __setitem__ call against b1 (the merge target) - self.assertIn(mock.call(key), b1.get.call_args_list) - self.assertIn(mock.call(key, value), 
b1.__setitem__.call_args_list) + self.assertEqual(b1.get.call_count, 4) + self.assertEqual(b1.__setitem__.call_count, 4) def test_stray_occurrence_overwrites_during_interpolation(self): - p1 = Parameters({'r' : mock.sentinel.ref, 'b': '${r}'}) - p2 = Parameters({'b' : mock.sentinel.goal}) + p1 = Parameters({'r' : mock.sentinel.ref, 'b': '${r}'}, SETTINGS, '') + p2 = Parameters({'b' : mock.sentinel.goal}, SETTINGS, '') p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict()['b'], mock.sentinel.goal) + class TestParametersNoMock(unittest.TestCase): def test_merge_scalars(self): - p = Parameters(SIMPLE) + p = Parameters(SIMPLE, SETTINGS, '') mergee = {'five':5,'four':4,'None':None,'tuple':(1,2,3)} p.merge(mergee) + p.initialise_interpolation() goal = SIMPLE.copy() goal.update(mergee) self.assertDictEqual(p.as_dict(), goal) def test_merge_scalars_overwrite(self): - p = Parameters(SIMPLE) + p = Parameters(SIMPLE, SETTINGS, '') mergee = {'two':5,'four':4,'three':None,'one':(1,2,3)} p.merge(mergee) + p.initialise_interpolation() goal = SIMPLE.copy() goal.update(mergee) self.assertDictEqual(p.as_dict(), goal) @@ -155,35 +181,162 @@ def test_merge_scalars_overwrite(self): def test_merge_lists(self): l1 = [1,2,3] l2 = [2,3,4] - p1 = Parameters(dict(list=l1[:])) - p2 = Parameters(dict(list=l2)) + p1 = Parameters(dict(list=l1[:]), SETTINGS, '') + p2 = Parameters(dict(list=l2), SETTINGS, '') p1.merge(p2) + p1.initialise_interpolation() self.assertListEqual(p1.as_dict()['list'], l1+l2) def test_merge_list_into_scalar(self): l = ['foo', 1, 2] - p1 = Parameters(dict(key=l[0])) - p1.merge(Parameters(dict(key=l[1:]))) + p1 = Parameters(dict(key=l[0]), SETTINGS, '') + p2 = Parameters(dict(key=l[1:]), SETTINGS, '') + with self.assertRaises(TypeMergeError) as e: + p1.merge(p2) + p1.interpolate() + self.assertEqual(e.exception.message, "-> \n Cannot merge list over scalar, at key, in ; ") + + def test_merge_list_into_scalar_allow(self): + settings = 
Settings({'allow_list_over_scalar': True}) + l = ['foo', 1, 2] + p1 = Parameters(dict(key=l[0]), settings, '') + p2 = Parameters(dict(key=l[1:]), settings, '') + p1.merge(p2) + p1.interpolate() self.assertListEqual(p1.as_dict()['key'], l) def test_merge_scalar_over_list(self): l = ['foo', 1, 2] - p1 = Parameters(dict(key=l[:2])) - p1.merge(Parameters(dict(key=l[2]))) + p1 = Parameters(dict(key=l[:2]), SETTINGS, '') + p2 = Parameters(dict(key=l[2]), SETTINGS, '') + with self.assertRaises(TypeMergeError) as e: + p1.merge(p2) + p1.interpolate() + self.assertEqual(e.exception.message, "-> \n Cannot merge scalar over list, at key, in ; ") + + def test_merge_scalar_over_list_allow(self): + l = ['foo', 1, 2] + settings = Settings({'allow_scalar_over_list': True}) + p1 = Parameters(dict(key=l[:2]), settings, '') + p2 = Parameters(dict(key=l[2]), settings, '') + p1.merge(p2) + p1.interpolate() self.assertEqual(p1.as_dict()['key'], l[2]) + def test_merge_none_over_list(self): + l = ['foo', 1, 2] + settings = Settings({'allow_none_override': False}) + p1 = Parameters(dict(key=l[:2]), settings, '') + p2 = Parameters(dict(key=None), settings, '') + with self.assertRaises(TypeMergeError) as e: + p1.merge(p2) + p1.interpolate() + self.assertEqual(e.exception.message, "-> \n Cannot merge scalar over list, at key, in ; ") + + def test_merge_none_over_list_allow(self): + l = ['foo', 1, 2] + settings = Settings({'allow_none_override': True}) + p1 = Parameters(dict(key=l[:2]), settings, '') + p2 = Parameters(dict(key=None), settings, '') + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict()['key'], None) + + def test_merge_dict_over_scalar(self): + d = { 'one': 1, 'two': 2 } + p1 = Parameters({ 'a': 1 }, SETTINGS, '') + p2 = Parameters({ 'a': d }, SETTINGS, '') + with self.assertRaises(TypeMergeError) as e: + p1.merge(p2) + p1.interpolate() + self.assertEqual(e.exception.message, "-> \n Cannot merge dictionary over scalar, at a, in ; ") + + def 
test_merge_dict_over_scalar_allow(self): + settings = Settings({'allow_dict_over_scalar': True}) + d = { 'one': 1, 'two': 2 } + p1 = Parameters({ 'a': 1 }, settings, '') + p2 = Parameters({ 'a': d }, settings, '') + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict(), { 'a': d }) + + def test_merge_scalar_over_dict(self): + d = { 'one': 1, 'two': 2} + p1 = Parameters({ 'a': d }, SETTINGS, '') + p2 = Parameters({ 'a': 1 }, SETTINGS, '') + with self.assertRaises(TypeMergeError) as e: + p1.merge(p2) + p1.interpolate() + self.assertEqual(e.exception.message, "-> \n Cannot merge scalar over dictionary, at a, in ; ") + + def test_merge_scalar_over_dict_allow(self): + d = { 'one': 1, 'two': 2} + settings = Settings({'allow_scalar_over_dict': True}) + p1 = Parameters({ 'a': d }, settings, '') + p2 = Parameters({ 'a': 1 }, settings, '') + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict(), { 'a': 1}) + + def test_merge_none_over_dict(self): + p1 = Parameters(dict(key=SIMPLE), SETTINGS, '') + p2 = Parameters(dict(key=None), SETTINGS, '') + with self.assertRaises(TypeMergeError) as e: + p1.merge(p2) + p1.interpolate() + self.assertEqual(e.exception.message, "-> \n Cannot merge scalar over dictionary, at key, in ; ") + + def test_merge_none_over_dict_allow(self): + settings = Settings({'allow_none_override': True}) + p1 = Parameters(dict(key=SIMPLE), settings, '') + p2 = Parameters(dict(key=None), settings, '') + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict()['key'], None) + + def test_merge_list_over_dict(self): + p1 = Parameters({}, SETTINGS, '') + p2 = Parameters({'one': { 'a': { 'b': 'c' } } }, SETTINGS, 'second') + p3 = Parameters({'one': { 'a': [ 'b' ] } }, SETTINGS, 'third') + with self.assertRaises(TypeMergeError) as e: + p1.merge(p2) + p1.merge(p3) + p1.interpolate() + self.assertEqual(e.exception.message, "-> \n Cannot merge list over dictionary, at one:a, in second; third") + + # def test_merge_bare_dict_over_dict(self): + # 
settings = Settings({'allow_bare_override': True}) + # p1 = Parameters(dict(key=SIMPLE), settings, '') + # p2 = Parameters(dict(key=dict()), settings, '') + # p1.merge(p2) + # p1.initialise_interpolation() + # self.assertEqual(p1.as_dict()['key'], {}) + + # def test_merge_bare_list_over_list(self): + # l = ['foo', 1, 2] + # settings = Settings({'allow_bare_override': True}) + # p1 = Parameters(dict(key=l), settings, '') + # p2 = Parameters(dict(key=list()), settings, '') + # p1.merge(p2) + # p1.initialise_interpolation() + # self.assertEqual(p1.as_dict()['key'], []) + def test_merge_dicts(self): mergee = {'five':5,'four':4,'None':None,'tuple':(1,2,3)} - p = Parameters(dict(dict=SIMPLE)) - p.merge(Parameters(dict(dict=mergee))) + p = Parameters(dict(dict=SIMPLE), SETTINGS, '') + p2 = Parameters(dict(dict=mergee), SETTINGS, '') + p.merge(p2) + p.initialise_interpolation() goal = SIMPLE.copy() goal.update(mergee) self.assertDictEqual(p.as_dict(), dict(dict=goal)) def test_merge_dicts_overwrite(self): mergee = {'two':5,'four':4,'three':None,'one':(1,2,3)} - p = Parameters(dict(dict=SIMPLE)) - p.merge(Parameters(dict(dict=mergee))) + p = Parameters(dict(dict=SIMPLE), SETTINGS, '') + p2 = Parameters(dict(dict=mergee), SETTINGS, '') + p.merge(p2) + p.initialise_interpolation() goal = SIMPLE.copy() goal.update(mergee) self.assertDictEqual(p.as_dict(), dict(dict=goal)) @@ -196,62 +349,475 @@ def test_merge_dicts_override(self): 'two': ['delta']} goal = {'one': {'a': 'alpha'}, 'two': ['gamma']} - p = Parameters(dict(dict=base)) - p.merge(Parameters(dict(dict=mergee))) + p = Parameters(dict(dict=base), SETTINGS, '') + p2 = Parameters(dict(dict=mergee), SETTINGS, '') + p.merge(p2) + p.interpolate() self.assertDictEqual(p.as_dict(), dict(dict=goal)) - def test_merge_dict_into_scalar(self): - p = Parameters(dict(base='foo')) - with self.assertRaises(TypeError): - p.merge(Parameters(dict(base=SIMPLE))) - - def test_merge_scalar_over_dict(self): - p = Parameters(dict(base=SIMPLE)) 
- mergee = {'base':'foo'} - p.merge(Parameters(mergee)) - self.assertDictEqual(p.as_dict(), mergee) - def test_interpolate_single(self): v = 42 - d = {'foo': 'bar'.join(PARAMETER_INTERPOLATION_SENTINELS), + d = {'foo': 'bar'.join(SETTINGS.reference_sentinels), 'bar': v} - p = Parameters(d) + p = Parameters(d, SETTINGS, '') p.interpolate() self.assertEqual(p.as_dict()['foo'], v) def test_interpolate_multiple(self): v = '42' - d = {'foo': 'bar'.join(PARAMETER_INTERPOLATION_SENTINELS) + 'meep'.join(PARAMETER_INTERPOLATION_SENTINELS), + d = {'foo': 'bar'.join(SETTINGS.reference_sentinels) + 'meep'.join(SETTINGS.reference_sentinels), 'bar': v[0], 'meep': v[1]} - p = Parameters(d) + p = Parameters(d, SETTINGS, '') p.interpolate() self.assertEqual(p.as_dict()['foo'], v) def test_interpolate_multilevel(self): v = 42 - d = {'foo': 'bar'.join(PARAMETER_INTERPOLATION_SENTINELS), - 'bar': 'meep'.join(PARAMETER_INTERPOLATION_SENTINELS), + d = {'foo': 'bar'.join(SETTINGS.reference_sentinels), + 'bar': 'meep'.join(SETTINGS.reference_sentinels), 'meep': v} - p = Parameters(d) + p = Parameters(d, SETTINGS, '') p.interpolate() self.assertEqual(p.as_dict()['foo'], v) def test_interpolate_list(self): - l = [41,42,43] - d = {'foo': 'bar'.join(PARAMETER_INTERPOLATION_SENTINELS), + l = [41, 42, 43] + d = {'foo': 'bar'.join(SETTINGS.reference_sentinels), 'bar': l} - p = Parameters(d) + p = Parameters(d, SETTINGS, '') p.interpolate() self.assertEqual(p.as_dict()['foo'], l) def test_interpolate_infrecursion(self): v = 42 - d = {'foo': 'bar'.join(PARAMETER_INTERPOLATION_SENTINELS), - 'bar': 'foo'.join(PARAMETER_INTERPOLATION_SENTINELS)} - p = Parameters(d) - with self.assertRaises(InfiniteRecursionError): + d = {'foo': 'bar'.join(SETTINGS.reference_sentinels), + 'bar': 'foo'.join(SETTINGS.reference_sentinels)} + p = Parameters(d, SETTINGS, '') + with self.assertRaises(InfiniteRecursionError) as e: p.interpolate() + # interpolation can start with foo or bar + 
self.assertIn(e.exception.message, [ "-> \n Infinite recursion: ${foo}, at bar", + "-> \n Infinite recursion: ${bar}, at foo"]) + + def test_nested_references(self): + d = {'a': '${${z}}', 'b': 2, 'z': 'b'} + r = {'a': 2, 'b': 2, 'z': 'b'} + p = Parameters(d, SETTINGS, '') + p.interpolate() + self.assertEqual(p.as_dict(), r) + + def test_nested_deep_references(self): + d = {'one': { 'a': 1, 'b': '${one:${one:c}}', 'c': 'a' } } + r = {'one': { 'a': 1, 'b': 1, 'c': 'a'} } + p = Parameters(d, SETTINGS, '') + p.interpolate() + self.assertEqual(p.as_dict(), r) + + def test_stray_occurrence_overwrites_during_interpolation(self): + p1 = Parameters({'r' : 1, 'b': '${r}'}, SETTINGS, '') + p2 = Parameters({'b' : 2}, SETTINGS, '') + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict()['b'], 2) + + def test_referenced_dict_deep_overwrite(self): + p1 = Parameters({'alpha': {'one': {'a': 1, 'b': 2} } }, SETTINGS, '') + p2 = Parameters({'beta': '${alpha}'}, SETTINGS, '') + p3 = Parameters({'alpha': {'one': {'c': 3, 'd': 4} }, + 'beta': {'one': {'a': 99} } }, SETTINGS, '') + r = {'alpha': {'one': {'a':1, 'b': 2, 'c': 3, 'd':4} }, + 'beta': {'one': {'a':99, 'b': 2, 'c': 3, 'd':4} } } + p1.merge(p2) + p1.merge(p3) + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def test_complex_reference_overwriting(self): + p1 = Parameters({'one': 'abc_123_${two}_${three}', 'two': 'XYZ', 'four': 4}, SETTINGS, '') + p2 = Parameters({'one': 'QWERTY_${three}_${four}', 'three': '999'}, SETTINGS, '') + r = {'one': 'QWERTY_999_4', 'two': 'XYZ', 'three': '999', 'four': 4} + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def test_nested_reference_with_overwriting(self): + p1 = Parameters({'one': {'a': 1, 'b': 2, 'z': 'a'}, + 'two': '${one:${one:z}}' }, SETTINGS, '') + p2 = Parameters({'one': {'z': 'b'} }, SETTINGS, '') + r = {'one': {'a': 1, 'b':2, 'z': 'b'}, 'two': 2} + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def 
test_merge_referenced_lists(self): + p1 = Parameters({'one': [ 1, 2, 3 ], 'two': [ 4, 5, 6 ], 'three': '${one}'}, SETTINGS, '') + p2 = Parameters({'three': '${two}'}, SETTINGS, '') + r = {'one': [ 1, 2, 3 ], 'two': [ 4, 5, 6], 'three': [ 1, 2, 3, 4, 5, 6 ]} + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def test_merge_referenced_dicts(self): + p1 = Parameters({'one': {'a': 1, 'b': 2}, 'two': {'c': 3, 'd': 4}, 'three': '${one}'}, SETTINGS, '') + p2 = Parameters({'three': '${two}'}, SETTINGS, '') + r = {'one': {'a': 1, 'b': 2}, 'two': {'c': 3, 'd': 4}, 'three': {'a': 1, 'b': 2, 'c': 3, 'd': 4}} + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def test_deep_refs_in_referenced_dicts(self): + p = Parameters({'A': '${C:a}', 'B': {'a': 1, 'b': 2}, 'C': '${B}'}, SETTINGS, '') + r = {'A': 1, 'B': {'a': 1, 'b': 2}, 'C': {'a': 1, 'b': 2}} + p.interpolate() + self.assertEqual(p.as_dict(), r) + + def test_overwrite_none(self): + p1 = Parameters({'A': None, 'B': None, 'C': None, 'D': None, 'E': None, 'F': None}, SETTINGS, '') + p2 = Parameters({'A': 'abc', 'B': [1, 2, 3], 'C': {'a': 'aaa', 'b': 'bbb'}, 'D': '${A}', 'E': '${B}', 'F': '${C}'}, SETTINGS, '') + r = {'A': 'abc', 'B': [1, 2, 3], 'C': {'a': 'aaa', 'b': 'bbb'}, 'D': 'abc', 'E': [1, 2, 3], 'F': {'a': 'aaa', 'b': 'bbb'}} + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def test_overwrite_dict(self): + p1 = Parameters({'a': { 'one': 1, 'two': 2 }}, SETTINGS, '') + p2 = Parameters({'~a': { 'three': 3, 'four': 4 }}, SETTINGS, '') + r = {'a': { 'three': 3, 'four': 4 }} + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def test_overwrite_list(self): + p1 = Parameters({'a': [1, 2]}, SETTINGS, '') + p2 = Parameters({'~a': [3, 4]}, SETTINGS, '') + r = {'a': [3, 4]} + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def test_interpolate_escaping(self): + v = 'bar'.join(SETTINGS.reference_sentinels) + d = 
{'foo': SETTINGS.escape_character + 'bar'.join(SETTINGS.reference_sentinels), + 'bar': 'unused'} + p = Parameters(d, SETTINGS, '') + p.initialise_interpolation() + self.assertEqual(p.as_dict()['foo'], v) + + def test_interpolate_double_escaping(self): + v = SETTINGS.escape_character + 'meep' + d = {'foo': SETTINGS.escape_character + SETTINGS.escape_character + 'bar'.join(SETTINGS.reference_sentinels), + 'bar': 'meep'} + p = Parameters(d, SETTINGS, '') + p.interpolate() + self.assertEqual(p.as_dict()['foo'], v) + + def test_interpolate_escaping_backwards_compatibility(self): + """In all following cases, escaping should not happen and the escape character + needs to be printed as-is, to ensure backwards compatibility to older versions.""" + v = ' '.join([ + # Escape character followed by unescapable character + '1', SETTINGS.escape_character, + # Escape character followed by escape character + '2', SETTINGS.escape_character + SETTINGS.escape_character, + # Escape character followed by interpolation end sentinel + '3', SETTINGS.escape_character + SETTINGS.reference_sentinels[1], + # Escape character at the end of the string + '4', SETTINGS.escape_character + ]) + d = {'foo': v} + p = Parameters(d, SETTINGS, '') + p.initialise_interpolation() + self.assertEqual(p.as_dict()['foo'], v) + + def test_escape_close_in_ref(self): + p1 = Parameters({'one}': 1, 'two': '${one\\}}'}, SETTINGS, '') + r = {'one}': 1, 'two': 1} + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def test_double_escape_in_ref(self): + d = {'one\\': 1, 'two': '${one\\\\}'} + p1 = Parameters(d, SETTINGS, '') + r = {'one\\': 1, 'two': 1} + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def test_merging_for_multiple_nodes(self): + p1 = Parameters({ 'alpha': { 'one': 111 }}, SETTINGS, '') + p2 = Parameters({ 'beta': {'two': '${alpha:one}' }}, SETTINGS, '') + p3 = Parameters({ 'beta': {'two': 222 }}, SETTINGS, '') + n1 = Parameters({ 'name': 'node1'}, SETTINGS, '') + r1 = { 'alpha': { 
'one': 111 }, 'beta': { 'two': 111 }, 'name': 'node1' } + r2 = { 'alpha': { 'one': 111 }, 'beta': { 'two': 222 }, 'name': 'node2' } + n1.merge(p1) + n1.merge(p2) + n1.interpolate() + n2 = Parameters({'name': 'node2'}, SETTINGS, '') + n2.merge(p1) + n2.merge(p2) + n2.merge(p3) + n2.interpolate() + self.assertEqual(n1.as_dict(), r1) + self.assertEqual(n2.as_dict(), r2) + + def test_list_merging_for_multiple_nodes(self): + p1 = Parameters({ 'alpha': { 'one': [1, 2] }}, SETTINGS, '') + p2 = Parameters({ 'beta': {'two': '${alpha:one}' }}, SETTINGS, '') + p3 = Parameters({ 'beta': {'two': [3] }}, SETTINGS, '') + n1 = Parameters({ 'name': 'node1'}, SETTINGS, '') + r1 = { 'alpha': { 'one': [1, 2] }, 'beta': { 'two': [1, 2] }, 'name': 'node1' } + r2 = { 'alpha': { 'one': [1, 2] }, 'beta': { 'two': [1, 2, 3] }, 'name': 'node2' } + n1.merge(p1) + n1.merge(p2) + n1.interpolate() + n2 = Parameters({'name': 'node2'}, SETTINGS, '') + n2.merge(p1) + n2.merge(p2) + n2.merge(p3) + n2.interpolate() + self.assertEqual(n1.as_dict(), r1) + self.assertEqual(n2.as_dict(), r2) + + def test_dict_merging_for_multiple_nodes(self): + p1 = Parameters({ 'alpha': { 'one': { 'a': 'aa', 'b': 'bb' }}}, SETTINGS, '') + p2 = Parameters({ 'beta': {'two': '${alpha:one}' }}, SETTINGS, '') + p3 = Parameters({ 'beta': {'two': {'c': 'cc' }}}, SETTINGS, '') + n1 = Parameters({ 'name': 'node1'}, SETTINGS, '') + r1 = { 'alpha': { 'one': {'a': 'aa', 'b': 'bb'} }, 'beta': { 'two': {'a': 'aa', 'b': 'bb'} }, 'name': 'node1' } + r2 = { 'alpha': { 'one': {'a': 'aa', 'b': 'bb'} }, 'beta': { 'two': {'a': 'aa', 'b': 'bb', 'c': 'cc'} }, 'name': 'node2' } + n1.merge(p1) + n1.merge(p2) + n1.interpolate() + n2 = Parameters({'name': 'node2'}, SETTINGS, '') + n2.merge(p1) + n2.merge(p2) + n2.merge(p3) + n2.interpolate() + self.assertEqual(n1.as_dict(), r1) + self.assertEqual(n2.as_dict(), r2) + + def test_list_merging_with_refs_for_multiple_nodes(self): + p1 = Parameters({ 'alpha': { 'one': [1, 2], 'two': [3, 4] }}, 
SETTINGS, '') + p2 = Parameters({ 'beta': { 'three': '${alpha:one}' }}, SETTINGS, '') + p3 = Parameters({ 'beta': { 'three': '${alpha:two}' }}, SETTINGS, '') + p4 = Parameters({ 'beta': { 'three': '${alpha:one}' }}, SETTINGS, '') + n1 = Parameters({ 'name': 'node1' }, SETTINGS, '') + r1 = {'alpha': {'one': [1, 2], 'two': [3, 4]}, 'beta': {'three': [1, 2]}, 'name': 'node1'} + r2 = {'alpha': {'one': [1, 2], 'two': [3, 4]}, 'beta': {'three': [1, 2, 3, 4, 1, 2]}, 'name': 'node2'} + n2 = Parameters({ 'name': 'node2' }, SETTINGS, '') + n2.merge(p1) + n2.merge(p2) + n2.merge(p3) + n2.merge(p4) + n2.interpolate() + n1.merge(p1) + n1.merge(p2) + n1.interpolate() + self.assertEqual(n1.as_dict(), r1) + self.assertEqual(n2.as_dict(), r2) + + def test_nested_refs_with_multiple_nodes(self): + p1 = Parameters({ 'alpha': { 'one': 1, 'two': 2 } }, SETTINGS, '') + p2 = Parameters({ 'beta': { 'three': 'one' } }, SETTINGS, '') + p3 = Parameters({ 'beta': { 'three': 'two' } }, SETTINGS, '') + p4 = Parameters({ 'beta': { 'four': '${alpha:${beta:three}}' } }, SETTINGS, '') + n1 = Parameters({ 'name': 'node1' }, SETTINGS, '') + r1 = {'alpha': {'one': 1, 'two': 2}, 'beta': {'three': 'one', 'four': 1}, 'name': 'node1'} + r2 = {'alpha': {'one': 1, 'two': 2}, 'beta': {'three': 'two', 'four': 2}, 'name': 'node2'} + n1.merge(p1) + n1.merge(p4) + n1.merge(p2) + n1.interpolate() + n2 = Parameters({ 'name': 'node2' }, SETTINGS, '') + n2.merge(p1) + n2.merge(p4) + n2.merge(p3) + n2.interpolate() + self.assertEqual(n1.as_dict(), r1) + self.assertEqual(n2.as_dict(), r2) + + def test_nested_refs_error_message(self): + # beta is missing, oops + p1 = Parameters({'alpha': {'one': 1, 'two': 2}, 'gamma': '${alpha:${beta}}'}, SETTINGS, '') + with self.assertRaises(InterpolationError) as error: + p1.interpolate() + self.assertEqual(error.exception.message, "-> \n Bad references, at gamma\n ${beta}") + + def test_multiple_resolve_errors(self): + p1 = Parameters({'alpha': '${gamma}', 'beta': '${gamma}'}, 
SETTINGS, '') + with self.assertRaises(ResolveErrorList) as error: + p1.interpolate() + # interpolation can start with either alpha or beta + self.assertIn(error.exception.message, [ "-> \n Cannot resolve ${gamma}, at alpha\n Cannot resolve ${gamma}, at beta", + "-> \n Cannot resolve ${gamma}, at beta\n Cannot resolve ${gamma}, at alpha"]) + + def test_force_single_resolve_error(self): + settings = copy.deepcopy(SETTINGS) + settings.group_errors = False + p1 = Parameters({'alpha': '${gamma}', 'beta': '${gamma}'}, settings, '') + with self.assertRaises(ResolveError) as error: + p1.interpolate() + # interpolation can start with either alpha or beta + self.assertIn(error.exception.message, [ "-> \n Cannot resolve ${gamma}, at alpha", + "-> \n Cannot resolve ${gamma}, at beta"]) + + def test_ignore_overwriten_missing_reference(self): + settings = copy.deepcopy(SETTINGS) + settings.ignore_overwritten_missing_references = True + p1 = Parameters({'alpha': '${beta}'}, settings, '') + p2 = Parameters({'alpha': '${gamma}'}, settings, '') + p3 = Parameters({'gamma': 3}, settings, '') + r1 = {'alpha': 3, 'gamma': 3} + p1.merge(p2) + p1.merge(p3) + err1 = "[WARNING] Reference '${beta}' undefined\n" + with mock.patch('sys.stderr', new=MockDevice()) as std_err: + p1.interpolate() + self.assertEqual(p1.as_dict(), r1) + self.assertEqual(std_err.text(), err1) + + def test_ignore_overwriten_missing_reference_last_value(self): + # an error should be raised if the last reference to be merged + # is missing even if ignore_overwritten_missing_references is true + settings = copy.deepcopy(SETTINGS) + settings.ignore_overwritten_missing_references = True + p1 = Parameters({'alpha': '${gamma}'}, settings, '') + p2 = Parameters({'alpha': '${beta}'}, settings, '') + p3 = Parameters({'gamma': 3}, settings, '') + p1.merge(p2) + p1.merge(p3) + with self.assertRaises(InterpolationError) as error: + p1.interpolate() + self.assertEqual(error.exception.message, "-> \n Cannot resolve ${beta}, at 
alpha") + + def test_ignore_overwriten_missing_reference_dict(self): + # setting ignore_overwritten_missing_references to true should + # not change the behaviour for dicts + settings = copy.deepcopy(SETTINGS) + settings.ignore_overwritten_missing_references = True + p1 = Parameters({'alpha': '${beta}'}, settings, '') + p2 = Parameters({'alpha': '${gamma}'}, settings, '') + p3 = Parameters({'gamma': {'one': 1, 'two': 2}}, settings, '') + err1 = "[WARNING] Reference '${beta}' undefined\n" + p1.merge(p2) + p1.merge(p3) + with self.assertRaises(InterpolationError) as error, mock.patch('sys.stderr', new=MockDevice()) as std_err: + p1.interpolate() + self.assertEqual(error.exception.message, "-> \n Cannot resolve ${beta}, at alpha") + self.assertEqual(std_err.text(), err1) + + def test_escaped_string_in_ref_dict_1(self): + # test with escaped string in first dict to be merged + p1 = Parameters({'a': { 'one': '${a_ref}' }, 'b': { 'two': '\${not_a_ref}' }, 'c': '${b}', 'a_ref': 123}, SETTINGS, '') + p2 = Parameters({'c': '${a}'}, SETTINGS, '') + r = { 'a': { 'one': 123 }, 'b': { 'two': '${not_a_ref}' }, 'c': { 'one': 123, 'two': '${not_a_ref}' }, 'a_ref': 123} + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def test_escaped_string_in_ref_dict_2(self): + # test with escaped string in second dict to be merged + p1 = Parameters({'a': { 'one': '${a_ref}' }, 'b': { 'two': '\${not_a_ref}' }, 'c': '${a}', 'a_ref': 123}, SETTINGS, '') + p2 = Parameters({'c': '${b}'}, SETTINGS, '') + r = { 'a': { 'one': 123 }, 'b': { 'two': '${not_a_ref}' }, 'c': { 'one': 123, 'two': '${not_a_ref}' }, 'a_ref': 123} + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def test_complex_overwrites_1(self): + # find a better name for this test + p1 = Parameters({ 'test': { 'dict': { 'a': '${values:one}', 'b': '${values:two}' } }, + 'values': { 'one': 1, 'two': 2, 'three': { 'x': 'X', 'y': 'Y' } } }, SETTINGS, '') + p2 = Parameters({ 'test': { 'dict': { 
'c': '${values:two}' } } }, SETTINGS, '') + p3 = Parameters({ 'test': { 'dict': { '~b': '${values:three}' } } }, SETTINGS, '') + r = {'test': {'dict': {'a': 1, 'b': {'x': 'X', 'y': 'Y'}, 'c': 2}}, 'values': {'one': 1, 'three': {'x': 'X', 'y': 'Y'}, 'two': 2} } + p2.merge(p3) + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def test_escaped_string_overwrites(self): + p1 = Parameters({ 'test': '\${not_a_ref}' }, SETTINGS, '') + p2 = Parameters({ 'test': '\${also_not_a_ref}' }, SETTINGS, '') + r = { 'test': '${also_not_a_ref}' } + p1.merge(p2) + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def test_escaped_string_in_ref_dict_overwrite(self): + p1 = Parameters({'a': { 'one': '\${not_a_ref}' }, 'b': { 'two': '\${also_not_a_ref}' }}, SETTINGS, '') + p2 = Parameters({'c': '${a}'}, SETTINGS, '') + p3 = Parameters({'c': '${b}'}, SETTINGS, '') + p4 = Parameters({'c': { 'one': '\${again_not_a_ref}' } }, SETTINGS, '') + r = {'a': {'one': '${not_a_ref}'}, 'b': {'two': '${also_not_a_ref}'}, 'c': {'one': '${again_not_a_ref}', 'two': '${also_not_a_ref}'}} + p1.merge(p2) + p1.merge(p3) + p1.merge(p4) + p1.interpolate() + self.assertEqual(p1.as_dict(), r) + + def test_strict_constant_parameter(self): + p1 = Parameters({'one': { 'a': 1} }, SETTINGS, 'first') + p2 = Parameters({'one': { '=a': 2} }, SETTINGS, 'second') + p3 = Parameters({'one': { 'a': 3} }, SETTINGS, 'third') + with self.assertRaises(ChangedConstantError) as e: + p1.merge(p2) + p1.merge(p3) + p1.interpolate() + self.assertEqual(e.exception.message, "-> \n Attempt to change constant value, at one:a, in second; third") + + def test_constant_parameter(self): + settings = Settings({'strict_constant_parameters': False}) + p1 = Parameters({'one': { 'a': 1} }, settings, 'first') + p2 = Parameters({'one': { '=a': 2} }, settings, 'second') + p3 = Parameters({'one': { 'a': 3} }, settings, 'third') + r = {'one': { 'a': 2 } } + p1.merge(p2) + p1.merge(p3) + p1.interpolate() + 
self.assertEqual(p1.as_dict(), r) + + def test_interpolated_list_type(self): + p1 = Parameters({'a': [ 1, 2, 3 ]}, SETTINGS, 'first') + r = {'a': [ 1, 2, 3 ]} + self.assertIs(type(p1.as_dict()['a']), ParameterList) + p1.interpolate() + self.assertIs(type(p1.as_dict()['a']), list) + self.assertEqual(p1.as_dict(), r) + + def test_interpolated_dict_type(self): + p1 = Parameters({'a': { 'one': 1, 'two': 2, 'three': 3 }}, SETTINGS, 'first') + r = {'a': { 'one': 1, 'two': 2, 'three': 3 }} + self.assertIs(type(p1.as_dict()['a']), ParameterDict) + p1.interpolate() + self.assertIs(type(p1.as_dict()['a']), dict) + self.assertEqual(p1.as_dict(), r) + + def test_merged_interpolated_list_type(self): + p1 = Parameters({'a': [ 1, 2, 3 ]}, SETTINGS, 'first') + p2 = Parameters({'a': [ 4, 5, 6 ]}, SETTINGS, 'second') + r = {'a': [ 1, 2, 3, 4, 5, 6 ]} + self.assertIs(type(p1.as_dict()['a']), ParameterList) + self.assertIs(type(p2.as_dict()['a']), ParameterList) + p1.merge(p2) + self.assertIs(type(p1.as_dict()['a']), ValueList) + p1.interpolate() + self.assertIs(type(p1.as_dict()['a']), list) + self.assertEqual(p1.as_dict(), r) + + def test_merged_interpolated_dict_type(self): + p1 = Parameters({'a': { 'one': 1, 'two': 2, 'three': 3 }}, SETTINGS, 'first') + p2 = Parameters({'a': { 'four': 4, 'five': 5, 'six': 6 }}, SETTINGS, 'second') + r = {'a': { 'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6}} + self.assertIs(type(p1.as_dict()['a']), ParameterDict) + self.assertIs(type(p2.as_dict()['a']), ParameterDict) + p1.merge(p2) + self.assertIs(type(p1.as_dict()['a']), ParameterDict) + p1.interpolate() + self.assertIs(type(p1.as_dict()['a']), dict) + self.assertEqual(p1.as_dict(), r) + if __name__ == '__main__': unittest.main() diff --git a/reclass/defaults.py b/reclass/defaults.py index fb04c83a..f240f3f9 100644 --- a/reclass/defaults.py +++ b/reclass/defaults.py @@ -6,8 +6,13 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + import os, sys -from version import RECLASS_NAME +from .version import RECLASS_NAME # defaults for the command-line options OPT_STORAGE_TYPE = 'yaml_fs' @@ -15,8 +20,27 @@ OPT_NODES_URI = 'nodes' OPT_CLASSES_URI = 'classes' OPT_PRETTY_PRINT = True +OPT_GROUP_ERRORS = True +OPT_COMPOSE_NODE_NAME = False +OPT_NO_REFS = False OPT_OUTPUT = 'yaml' +OPT_IGNORE_CLASS_NOTFOUND = False +OPT_IGNORE_CLASS_NOTFOUND_REGEXP = ['.*'] +OPT_IGNORE_CLASS_NOTFOUND_WARNING = True + +OPT_IGNORE_OVERWRITTEN_MISSING_REFERENCES = True +OPT_STRICT_CONSTANT_PARAMETERS = True + +OPT_ALLOW_SCALAR_OVER_DICT = False +OPT_ALLOW_SCALAR_OVER_LIST = False +OPT_ALLOW_LIST_OVER_SCALAR = False +OPT_ALLOW_DICT_OVER_SCALAR = False +OPT_ALLOW_NONE_OVERRIDE = False + +OPT_INVENTORY_IGNORE_FAILED_NODE = False +OPT_INVENTORY_IGNORE_FAILED_RENDER = False + CONFIG_FILE_SEARCH_PATH = [os.getcwd(), os.path.expanduser('~'), OPT_INVENTORY_BASE_URI, @@ -24,6 +48,12 @@ ] CONFIG_FILE_NAME = RECLASS_NAME + '-config.yml' -PARAMETER_INTERPOLATION_SENTINELS = ('${', '}') +REFERENCE_SENTINELS = ('${', '}') +EXPORT_SENTINELS = ('$[', ']') PARAMETER_INTERPOLATION_DELIMITER = ':' PARAMETER_DICT_KEY_OVERRIDE_PREFIX = '~' +PARAMETER_DICT_KEY_CONSTANT_PREFIX = '=' +ESCAPE_CHARACTER = '\\' + +AUTOMATIC_RECLASS_PARAMETERS = True +DEFAULT_ENVIRONMENT = 'base' diff --git a/reclass/errors.py b/reclass/errors.py index 5ce4d73c..24bdfaaa 100644 --- a/reclass/errors.py +++ b/reclass/errors.py @@ -6,23 +6,35 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals import posix, sys import traceback -from reclass.defaults import PARAMETER_INTERPOLATION_SENTINELS +from reclass.defaults import REFERENCE_SENTINELS, EXPORT_SENTINELS +from reclass.utils.dictpath import DictPath class ReclassException(Exception): - def __init__(self, rc=posix.EX_SOFTWARE, msg=None): + def __init__(self, rc=posix.EX_SOFTWARE, msg=None, tbFlag=True): super(ReclassException, self).__init__() self._rc = rc self._msg = msg - self._traceback = traceback.format_exc() + if tbFlag: + self._traceback = traceback.format_exc() + else: + self._traceback = None + self._full_traceback = False message = property(lambda self: self._get_message()) rc = property(lambda self: self._rc) + def __str__(self): + return self.message + '\n' + super(ReclassException, self).__str__() + def _get_message(self): if self._msg: return self._msg @@ -30,9 +42,14 @@ def _get_message(self): return 'No error message provided.' 
def exit_with_message(self, out=sys.stderr): - print >>out, self.message + if self._full_traceback: + t, v, tb = sys.exc_info() + print('Full Traceback', file=out) + for l in traceback.format_tb(tb): + print(l, file=out) if self._traceback: - print >>out, self._traceback + print(self._traceback, file=out) + print(self.message, file=out) sys.exit(self.rc) @@ -89,83 +106,214 @@ class NodeNotFound(NotFoundError): def __init__(self, storage, nodename, uri): super(NodeNotFound, self).__init__(msg=None) - self._storage = storage - self._name = nodename - self._uri = uri + self.storage = storage + self.name = nodename + self.uri = uri def _get_message(self): msg = "Node '{0}' not found under {1}://{2}" - return msg.format(self._name, self._storage, self._uri) + return msg.format(self.name, self.storage, self.uri) -class ClassNotFound(NotFoundError): +class InterpolationError(ReclassException): - def __init__(self, storage, classname, uri, nodename=None): - super(ClassNotFound, self).__init__(msg=None) - self._storage = storage - self._name = classname - self._uri = uri - self._nodename = nodename + def __init__(self, msg, rc=posix.EX_DATAERR, nodename='', uri=None, context=None, tbFlag=True): + super(InterpolationError, self).__init__(rc=rc, msg=msg, tbFlag=tbFlag) + self.nodename = nodename + self.uri = uri + self.context = context def _get_message(self): - if self._nodename: - msg = "Class '{0}' (in ancestry of node '{1}') not found " \ - "under {2}://{3}" - else: - msg = "Class '{0}' not found under {2}://{3}" - return msg.format(self._name, self._nodename, self._storage, self._uri) + msg = '-> {0}\n'.format(self.nodename) + msg += self._render_error_message(self._get_error_message(), 1) + msg = msg[:-1] + return msg + + def _render_error_message(self, message_list, indent): + msg = '' + for l in message_list: + if isinstance(l, list): + msg += self._render_error_message(l, indent + 1) + else: + msg += (' ' * indent * 3) + l + '\n' + return msg - def set_nodename(self, 
nodename): - self._nodename = nodename + def _add_context_and_uri(self): + msg = '' + if self.context: + msg += ', at %s' % str(self.context) + if self.uri: + msg += ', in %s' % self.uri + return msg -class InterpolationError(ReclassException): +class ClassNotFound(InterpolationError): - def __init__(self, msg, rc=posix.EX_DATAERR): - super(InterpolationError, self).__init__(rc=rc, msg=msg) + def __init__(self, storage, classname, path, nodename='', uri=None): + super(ClassNotFound, self).__init__(msg=None, uri=uri, nodename=nodename) + self.storage = storage + self.name = classname + self.path = path + def _get_error_message(self): + msg = [ 'In {0}'.format(self.uri), + 'Class {0} not found under {1}://{2}'.format(self.name, self.storage, self.path) ] + return msg -class UndefinedVariableError(InterpolationError): - def __init__(self, var, context=None): - super(UndefinedVariableError, self).__init__(msg=None) - self._var = var - self._context = context - var = property(lambda self: self._var) - context = property(lambda self: self._context) +class ClassNameResolveError(InterpolationError): + def __init__(self, classname, nodename, uri): + super(ClassNameResolveError, self).__init__(msg=None, uri=uri, nodename=nodename) + self.name = classname - def _get_message(self): - msg = "Cannot resolve " + self._var.join(PARAMETER_INTERPOLATION_SENTINELS) - if self._context: - msg += ' in the context of %s' % self._context + def _get_error_message(self): + msg = [ 'In {0}'.format(self.uri), + 'Class name {0} not resolvable'.format(self.name) ] return msg - def set_context(self, context): - self._context = context +class InvQueryClassNotFound(InterpolationError): -class IncompleteInterpolationError(InterpolationError): + def __init__(self, classNotFoundError, nodename=''): + super(InvQueryClassNotFound, self).__init__(msg=None, nodename=nodename) + self.classNotFoundError = classNotFoundError + self._traceback = self.classNotFoundError._traceback - def __init__(self, string, 
end_sentinel): - super(IncompleteInterpolationError, self).__init__(msg=None) - self._ref = string.join(PARAMETER_INTERPOLATION_SENTINELS) - self._end_sentinel = end_sentinel + def _get_error_message(self): + msg = [ 'Inventory Queries:', + '-> {0}'.format(self.classNotFoundError.nodename) ] + msg.append(self.classNotFoundError._get_error_message()) + return msg - def _get_message(self): - msg = "Missing '{0}' to end reference: {1}" - return msg.format(self._end_sentinel, self._ref) + +class InvQueryClassNameResolveError(InterpolationError): + def __init__(self, classNameResolveError, nodename=''): + super(InvQueryClassNameResolveError, self).__init__(msg=None, nodename=nodename) + self.classNameResolveError = classNameResolveError + self._traceback = self.classNameResolveError._traceback + + def _get_error_message(self): + msg = [ 'Inventory Queries:', + '-> {0}'.format(self.classNameResolveError.nodename) ] + msg.append(self.classNameResolveError._get_error_message()) + return msg + + +class ResolveError(InterpolationError): + + def __init__(self, reference, uri=None, context=None): + super(ResolveError, self).__init__(msg=None) + self.reference = reference + + def _get_error_message(self): + msg = 'Cannot resolve {0}'.format(self.reference.join(REFERENCE_SENTINELS)) + self._add_context_and_uri() + return [ msg ] + + +class ResolveErrorList(InterpolationError): + def __init__(self): + super(ResolveErrorList, self).__init__(msg=None) + self.resolve_errors = [] + self._traceback = False + + def add(self, resolve_error): + self.resolve_errors.append(resolve_error) + + def have_errors(self): + return len(self.resolve_errors) > 0 + + def _get_error_message(self): + msgs = [] + for e in self.resolve_errors: + msgs.extend(e._get_error_message()) + return msgs + + +class InvQueryError(InterpolationError): + + def __init__(self, query, resolveError, uri=None, context=None): + super(InvQueryError, self).__init__(msg=None) + self.query = query + self.resolveError = 
resolveError + self._traceback = self.resolveError._traceback + + def _get_error_message(self): + msg1 = 'Failed inv query {0}'.format(self.query.join(EXPORT_SENTINELS)) + self._add_context_and_uri() + msg2 = '-> {0}'.format(self.resolveError.nodename) + msg3 = self.resolveError._get_error_message() + return [ msg1, msg2, msg3 ] + + +class ParseError(InterpolationError): + + def __init__(self, msg, line, col, lineno, rc=posix.EX_DATAERR): + super(ParseError, self).__init__(rc=rc, msg=None) + self._err = msg + self._line = line + self._col = col + self._lineno = lineno + + def _get_error_message(self): + msg = [ 'Parse error: {0}'.format(self._line.join(EXPORT_SENTINELS)) + self._add_context_and_uri() ] + msg.append('{0} at char {1}'.format(self._err, self._col - 1)) + return msg class InfiniteRecursionError(InterpolationError): - def __init__(self, path, ref): - super(InfiniteRecursionError, self).__init__(msg=None) - self._path = path - self._ref = ref.join(PARAMETER_INTERPOLATION_SENTINELS) + def __init__(self, context, ref, uri): + super(InfiniteRecursionError, self).__init__(msg=None, tbFlag=False, uri=uri) + self.context = context + self.ref = ref - def _get_message(self): - msg = "Infinite recursion while resolving {0} at {1}" - return msg.format(self._ref, self._path) + def _get_error_message(self): + msg = [ 'Infinite recursion: {0}'.format(self.ref.join(REFERENCE_SENTINELS)) + self._add_context_and_uri() ] + return msg + + +class BadReferencesError(InterpolationError): + + def __init__(self, refs, context, uri): + super(BadReferencesError, self).__init__(msg=None, context=context, uri=uri, tbFlag=False) + self.refs = [ r.join(REFERENCE_SENTINELS) for r in refs ] + + def _get_error_message(self): + msg = [ 'Bad references' + self._add_context_and_uri(), + ' ' + ', '.join(self.refs) ] + return msg + + +class TypeMergeError(InterpolationError): + + def __init__(self, value1, value2, uri): + super(TypeMergeError, self).__init__(msg=None, uri=uri, tbFlag=False) 
+ self.type1 = value1.item_type_str() + self.type2 = value2.item_type_str() + + def _get_error_message(self): + msg = [ 'Cannot merge {0} over {1}'.format(self.type1, self.type2) + self._add_context_and_uri() ] + return msg + + +class ExpressionError(InterpolationError): + + def __init__(self, msg, rc=posix.EX_DATAERR, tbFlag=True): + super(ExpressionError, self).__init__(rc=rc, msg=None, tbFlag=tbFlag) + self._error_msg = msg + + def _get_error_message(self): + msg = [ 'Expression error: {0}'.format(self._error_msg) + self._add_context_and_uri() ] + return msg + + +class ChangedConstantError(InterpolationError): + + def __init__(self, uri): + super(ChangedConstantError, self).__init__(msg=None, uri=uri, tbFlag=False) + + def _get_error_message(self): + msg = [ 'Attempt to change constant value' + self._add_context_and_uri() ] + return msg class MappingError(ReclassException): @@ -211,3 +359,10 @@ def _get_message(self): "definition in '{3}'. Nodes can only be defined once " \ "per inventory." return msg.format(self._storage, self._name, self._uris[1], self._uris[0]) + + +class MissingModuleError(ReclassException): + + def __init__(self, modname): + msg = "Module %s is missing" % modname + super(MissingModuleError, self).__init__(rc=posix.EX_DATAERR, msg=msg) diff --git a/reclass/output/__init__.py b/reclass/output/__init__.py index 58cd1019..000952c7 100644 --- a/reclass/output/__init__.py +++ b/reclass/output/__init__.py @@ -6,13 +6,18 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + class OutputterBase(object): def __init__(self): pass def dump(self, data, pretty_print=False): - raise NotImplementedError, "dump() method not yet implemented" + raise NotImplementedError("dump() method not implemented.") class OutputLoader(object): @@ -22,11 +27,10 @@ def __init__(self, outputter): try: self._module = __import__(self._name, globals(), locals(), self._name) except ImportError: - raise NotImplementedError + raise NotImplementedError() def load(self, attr='Outputter'): klass = getattr(self._module, attr, None) if klass is None: - raise AttributeError, \ - 'Outputter class {0} does not export "{1}"'.format(self._name, klass) + raise AttributeError('Outputter class {0} does not export "{1}"'.format(self._name, klass)) return klass diff --git a/reclass/output/json_outputter.py b/reclass/output/json_outputter.py index dab86ed8..5d4cfd4f 100644 --- a/reclass/output/json_outputter.py +++ b/reclass/output/json_outputter.py @@ -6,12 +6,18 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + from reclass.output import OutputterBase import json + class Outputter(OutputterBase): - def dump(self, data, pretty_print=False): + def dump(self, data, pretty_print=False, no_refs=False): separators = (',', ': ') if pretty_print else (',', ':') indent = 2 if pretty_print else None return json.dumps(data, indent=indent, separators=separators) diff --git a/reclass/output/yaml_outputter.py b/reclass/output/yaml_outputter.py index 2c70cc32..05519c67 100644 --- a/reclass/output/yaml_outputter.py +++ b/reclass/output/yaml_outputter.py @@ -6,10 +6,30 @@ # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + from reclass.output import OutputterBase import yaml +_SafeDumper = yaml.CSafeDumper if yaml.__with_libyaml__ else yaml.SafeDumper + + class Outputter(OutputterBase): - def dump(self, data, pretty_print=False): - return yaml.dump(data, default_flow_style=not pretty_print) + def dump(self, data, pretty_print=False, no_refs=False): + if (no_refs): + return yaml.dump(data, default_flow_style=not pretty_print, Dumper=ExplicitDumper) + else: + return yaml.dump(data, default_flow_style=not pretty_print, Dumper=_SafeDumper) + + +class ExplicitDumper(_SafeDumper): + """ + A dumper that will never emit aliases. 
+ """ + + def ignore_aliases(self, data): + return True diff --git a/reclass/settings.py b/reclass/settings.py new file mode 100644 index 00000000..62af976b --- /dev/null +++ b/reclass/settings.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import reclass.defaults as defaults + +from six import string_types, iteritems + + +class Settings(object): + + known_opts = { + 'allow_scalar_over_dict': defaults.OPT_ALLOW_SCALAR_OVER_DICT, + 'allow_scalar_over_list': defaults.OPT_ALLOW_SCALAR_OVER_LIST, + 'allow_list_over_scalar': defaults.OPT_ALLOW_LIST_OVER_SCALAR, + 'allow_dict_over_scalar': defaults.OPT_ALLOW_DICT_OVER_SCALAR, + 'allow_none_override': defaults.OPT_ALLOW_NONE_OVERRIDE, + 'automatic_parameters': defaults.AUTOMATIC_RECLASS_PARAMETERS, + 'default_environment': defaults.DEFAULT_ENVIRONMENT, + 'delimiter': defaults.PARAMETER_INTERPOLATION_DELIMITER, + 'dict_key_override_prefix': + defaults.PARAMETER_DICT_KEY_OVERRIDE_PREFIX, + 'dict_key_constant_prefix': + defaults.PARAMETER_DICT_KEY_CONSTANT_PREFIX, + 'escape_character': defaults.ESCAPE_CHARACTER, + 'export_sentinels': defaults.EXPORT_SENTINELS, + 'inventory_ignore_failed_node': + defaults.OPT_INVENTORY_IGNORE_FAILED_NODE, + 'inventory_ignore_failed_render': + defaults.OPT_INVENTORY_IGNORE_FAILED_RENDER, + 'reference_sentinels': defaults.REFERENCE_SENTINELS, + 'ignore_class_notfound': defaults.OPT_IGNORE_CLASS_NOTFOUND, + 'strict_constant_parameters': + defaults.OPT_STRICT_CONSTANT_PARAMETERS, + 'ignore_class_notfound_regexp': + defaults.OPT_IGNORE_CLASS_NOTFOUND_REGEXP, + 'ignore_class_notfound_warning': + defaults.OPT_IGNORE_CLASS_NOTFOUND_WARNING, + 'ignore_overwritten_missing_references': + defaults.OPT_IGNORE_OVERWRITTEN_MISSING_REFERENCES, + 'group_errors': defaults.OPT_GROUP_ERRORS, + 'compose_node_name': defaults.OPT_COMPOSE_NODE_NAME, + } + + def 
__init__(self, options={}): + for opt_name, opt_value in iteritems(self.known_opts): + setattr(self, opt_name, options.get(opt_name, opt_value)) + + self.dict_key_prefixes = [str(self.dict_key_override_prefix), + str(self.dict_key_constant_prefix)] + if isinstance(self.ignore_class_notfound_regexp, string_types): + self.ignore_class_notfound_regexp = [ + self.ignore_class_notfound_regexp] + + def __eq__(self, other): + if isinstance(other, type(self)): + return all(getattr(self, opt) == getattr(other, opt) + for opt in self.known_opts) + return False + + def __copy__(self): + cls = self.__class__ + result = cls.__new__(cls) + result.__dict__.update(self.__dict__) + return result + + def __deepcopy__(self, memo): + return self.__copy__() diff --git a/reclass/storage/__init__.py b/reclass/storage/__init__.py index 8ae24082..fe873e31 100644 --- a/reclass/storage/__init__.py +++ b/reclass/storage/__init__.py @@ -6,6 +6,12 @@ # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from reclass.storage.common import NameMangler class NodeStorageBase(object): @@ -14,14 +20,29 @@ def __init__(self, name): name = property(lambda self: self._name) - def get_node(self, name, merge_base=None): + def get_node(self, name, settings): msg = "Storage class '{0}' does not implement node entity retrieval." raise NotImplementedError(msg.format(self.name)) - def get_class(self, name): + def get_class(self, name, environment, settings): msg = "Storage class '{0}' does not implement class entity retrieval." raise NotImplementedError(msg.format(self.name)) def enumerate_nodes(self): msg = "Storage class '{0}' does not implement node enumeration." raise NotImplementedError(msg.format(self.name)) + + def path_mangler(self): + msg = "Storage class '{0}' does not implement path_mangler." 
+ raise NotImplementedError(msg.format(self.name)) + + +class ExternalNodeStorageBase(NodeStorageBase): + + def __init__(self, name, compose_node_name): + super(ExternalNodeStorageBase, self).__init__(name) + self.class_name_mangler = NameMangler.classes + if compose_node_name: + self.node_name_mangler = NameMangler.composed_nodes + else: + self.node_name_mangler = NameMangler.nodes diff --git a/reclass/storage/common.py b/reclass/storage/common.py new file mode 100644 index 00000000..13db7ec8 --- /dev/null +++ b/reclass/storage/common.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import os + +class NameMangler: + @staticmethod + def nodes(relpath, name): + # nodes are identified just by their basename, so + # no mangling required + return relpath, name + + @staticmethod + def composed_nodes(relpath, name): + if relpath == '.' or relpath == '': + # './' is converted to None + return None, name + parts = relpath.split(os.path.sep) + if parts[0].startswith("_"): + return relpath, name + parts.append(name) + return relpath, '.'.join(parts) + + @staticmethod + def classes(relpath, name): + if relpath == '.' or relpath == '': + # './' is converted to None + return None, name + parts = relpath.split(os.path.sep) + if name != 'init': + # "init" is the directory index, so only append the basename + # to the path parts for all other filenames. This has the + # effect that data in file "foo/init.yml" will be registered + # as data for class "foo", not "foo.init" + parts.append(name) + return relpath, '.'.join(parts) diff --git a/reclass/storage/loader.py b/reclass/storage/loader.py index 399e7fd0..0a66a666 100644 --- a/reclass/storage/loader.py +++ b/reclass/storage/loader.py @@ -6,14 +6,20 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from importlib import import_module class StorageBackendLoader(object): def __init__(self, storage_name): - self._name = 'reclass.storage.' + storage_name + self._name = str('reclass.storage.' + storage_name) try: - self._module = __import__(self._name, globals(), locals(), self._name) - except ImportError: + self._module = import_module(self._name) + except ImportError as e: raise NotImplementedError def load(self, klassname='ExternalNodeStorage'): @@ -21,5 +27,12 @@ def load(self, klassname='ExternalNodeStorage'): if klass is None: raise AttributeError('Storage backend class {0} does not export ' '"{1}"'.format(self._name, klassname)) - return klass + return klass + + def path_mangler(self, name='path_mangler'): + function = getattr(self._module, name, None) + if function is None: + raise AttributeError('Storage backend class {0} does not export ' + '"{1}"'.format(self._name, name)) + return function diff --git a/reclass/storage/memcache_proxy.py b/reclass/storage/memcache_proxy.py index 7d9ab5eb..cd90fdd8 100644 --- a/reclass/storage/memcache_proxy.py +++ b/reclass/storage/memcache_proxy.py @@ -6,6 +6,10 @@ # Copyright © 2007–14 martin f.
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals from reclass.storage import NodeStorageBase @@ -30,30 +34,27 @@ def __init__(self, real_storage, cache_classes=True, cache_nodes=True, name = property(lambda self: self._real_storage.name) - @staticmethod - def _cache_proxy(name, cache, getter): + def get_node(self, name, settings): + if not self._cache_nodes: + return self._real_storage.get_node(name, settings) try: - ret = cache[name] - - except KeyError, e: - ret = getter(name) - cache[name] = ret - + return self._nodes_cache[name] + except KeyError as e: + ret = self._real_storage.get_node(name, settings) + self._nodes_cache[name] = ret return ret - def get_node(self, name): - if not self._cache_nodes: - return self._real_storage.get_node(name) - - return MemcacheProxy._cache_proxy(name, self._nodes_cache, - self._real_storage.get_node) - - def get_class(self, name): + def get_class(self, name, environment, settings): if not self._cache_classes: - return self._real_storage.get_class(name) - - return MemcacheProxy._cache_proxy(name, self._classes_cache, - self._real_storage.get_class) + return self._real_storage.get_class(name, environment, settings) + try: + return self._classes_cache[environment][name] + except KeyError as e: + if environment not in self._classes_cache: + self._classes_cache[environment] = dict() + ret = self._real_storage.get_class(name, environment, settings) + self._classes_cache[environment][name] = ret + return ret def enumerate_nodes(self): if not self._cache_nodelist: diff --git a/reclass/storage/mixed/__init__.py b/reclass/storage/mixed/__init__.py new file mode 100644 index 00000000..45262cca --- /dev/null +++ b/reclass/storage/mixed/__init__.py @@ -0,0 +1,64 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass +from __future__ import absolute_import +from 
__future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import collections +import copy + +from six import iteritems + +import reclass.errors +from reclass import get_storage +from reclass.storage import ExternalNodeStorageBase + +def path_mangler(inventory_base_uri, nodes_uri, classes_uri): + if nodes_uri == classes_uri: + raise reclass.errors.DuplicateUriError(nodes_uri, classes_uri) + return nodes_uri, classes_uri + +STORAGE_NAME = 'mixed' + +class ExternalNodeStorage(ExternalNodeStorageBase): + + MixedUri = collections.namedtuple('MixedURI', 'storage_type options') + + def __init__(self, nodes_uri, classes_uri, compose_node_name): + super(ExternalNodeStorage, self).__init__(STORAGE_NAME, compose_node_name) + + self._nodes_uri = self._uri(nodes_uri) + self._nodes_storage = get_storage(self._nodes_uri.storage_type, self._nodes_uri.options, None, compose_node_name) + self._classes_default_uri = self._uri(classes_uri) + self._classes_default_storage = get_storage(self._classes_default_uri.storage_type, None, self._classes_default_uri.options, compose_node_name) + + self._classes_storage = dict() + if 'env_overrides' in classes_uri: + for override in classes_uri['env_overrides']: + for (env, options) in iteritems(override): + uri = copy.deepcopy(classes_uri) + uri.update(options) + uri = self._uri(uri) + self._classes_storage[env] = get_storage(uri.storage_type, None, uri.options, compose_node_name) + + def _uri(self, uri): + ret = copy.deepcopy(uri) + ret['storage_type'] = uri['storage_type'] + if 'env_overrides' in ret: + del ret['env_overrides'] + if uri['storage_type'] == 'yaml_fs': + ret = ret['uri'] + return self.MixedUri(uri['storage_type'], ret) + + def get_node(self, name, settings): + return self._nodes_storage.get_node(name, settings) + + def get_class(self, name, environment, settings): + storage = self._classes_storage.get(environment, self._classes_default_storage) + return storage.get_class(name,
environment, settings) + + def enumerate_nodes(self): + return self._nodes_storage.enumerate_nodes() diff --git a/reclass/storage/tests/__init__.py b/reclass/storage/tests/__init__.py index e69de29b..9aaaf25a 100644 --- a/reclass/storage/tests/__init__.py +++ b/reclass/storage/tests/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals diff --git a/reclass/storage/tests/test_loader.py b/reclass/storage/tests/test_loader.py index 6bef87f6..12cdec3f 100644 --- a/reclass/storage/tests/test_loader.py +++ b/reclass/storage/tests/test_loader.py @@ -6,10 +6,16 @@ # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + from reclass.storage.loader import StorageBackendLoader import unittest + class TestLoader(unittest.TestCase): def test_load(self): diff --git a/reclass/storage/tests/test_memcache_proxy.py b/reclass/storage/tests/test_memcache_proxy.py index 066c27ef..24acf203 100644 --- a/reclass/storage/tests/test_memcache_proxy.py +++ b/reclass/storage/tests/test_memcache_proxy.py @@ -6,15 +6,23 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from reclass.settings import Settings from reclass.storage.memcache_proxy import MemcacheProxy from reclass.storage import NodeStorageBase import unittest + try: import unittest.mock as mock except ImportError: import mock + class TestMemcacheProxy(unittest.TestCase): def setUp(self): @@ -22,48 +30,48 @@ def setUp(self): def test_no_nodes_caching(self): p = MemcacheProxy(self._storage, cache_nodes=False) - NAME = 'foo'; NAME2 = 'bar'; RET = 'baz' + NAME = 'foo'; NAME2 = 'bar'; RET = 'baz'; SETTINGS = Settings() self._storage.get_node.return_value = RET - self.assertEqual(p.get_node(NAME), RET) - self.assertEqual(p.get_node(NAME), RET) - self.assertEqual(p.get_node(NAME2), RET) - self.assertEqual(p.get_node(NAME2), RET) - expected = [mock.call(NAME), mock.call(NAME), - mock.call(NAME2), mock.call(NAME2)] + self.assertEqual(p.get_node(NAME, SETTINGS), RET) + self.assertEqual(p.get_node(NAME, SETTINGS), RET) + self.assertEqual(p.get_node(NAME2, SETTINGS), RET) + self.assertEqual(p.get_node(NAME2, SETTINGS), RET) + expected = [mock.call(NAME, SETTINGS), mock.call(NAME, SETTINGS), + mock.call(NAME2, SETTINGS), mock.call(NAME2, SETTINGS)] self.assertListEqual(self._storage.get_node.call_args_list, expected) def test_nodes_caching(self): p = MemcacheProxy(self._storage, cache_nodes=True) - NAME = 'foo'; NAME2 = 'bar'; RET = 'baz' + NAME = 'foo'; NAME2 = 'bar'; RET = 'baz'; SETTINGS = Settings() self._storage.get_node.return_value = RET - self.assertEqual(p.get_node(NAME), RET) - self.assertEqual(p.get_node(NAME), RET) - self.assertEqual(p.get_node(NAME2), RET) - self.assertEqual(p.get_node(NAME2), RET) - expected = [mock.call(NAME), mock.call(NAME2)] # called once each + self.assertEqual(p.get_node(NAME, SETTINGS), RET) + self.assertEqual(p.get_node(NAME, 
SETTINGS), RET) + self.assertEqual(p.get_node(NAME2, SETTINGS), RET) + self.assertEqual(p.get_node(NAME2, SETTINGS), RET) + expected = [mock.call(NAME, SETTINGS), mock.call(NAME2, SETTINGS)] # called once each self.assertListEqual(self._storage.get_node.call_args_list, expected) def test_no_classes_caching(self): p = MemcacheProxy(self._storage, cache_classes=False) - NAME = 'foo'; NAME2 = 'bar'; RET = 'baz' + NAME = 'foo'; NAME2 = 'bar'; RET = 'baz'; SETTINGS = Settings() self._storage.get_class.return_value = RET - self.assertEqual(p.get_class(NAME), RET) - self.assertEqual(p.get_class(NAME), RET) - self.assertEqual(p.get_class(NAME2), RET) - self.assertEqual(p.get_class(NAME2), RET) - expected = [mock.call(NAME), mock.call(NAME), - mock.call(NAME2), mock.call(NAME2)] + self.assertEqual(p.get_class(NAME, None, SETTINGS), RET) + self.assertEqual(p.get_class(NAME, None, SETTINGS), RET) + self.assertEqual(p.get_class(NAME2, None, SETTINGS), RET) + self.assertEqual(p.get_class(NAME2, None, SETTINGS), RET) + expected = [mock.call(NAME, None, SETTINGS), mock.call(NAME, None, SETTINGS), + mock.call(NAME2, None, SETTINGS), mock.call(NAME2, None, SETTINGS)] self.assertListEqual(self._storage.get_class.call_args_list, expected) def test_classes_caching(self): p = MemcacheProxy(self._storage, cache_classes=True) - NAME = 'foo'; NAME2 = 'bar'; RET = 'baz' + NAME = 'foo'; NAME2 = 'bar'; RET = 'baz'; SETTINGS = Settings() self._storage.get_class.return_value = RET - self.assertEqual(p.get_class(NAME), RET) - self.assertEqual(p.get_class(NAME), RET) - self.assertEqual(p.get_class(NAME2), RET) - self.assertEqual(p.get_class(NAME2), RET) - expected = [mock.call(NAME), mock.call(NAME2)] # called once each + self.assertEqual(p.get_class(NAME, None, SETTINGS), RET) + self.assertEqual(p.get_class(NAME, None, SETTINGS), RET) + self.assertEqual(p.get_class(NAME2, None, SETTINGS), RET) + self.assertEqual(p.get_class(NAME2, None, SETTINGS), RET) + expected = [mock.call(NAME, None, 
SETTINGS), mock.call(NAME2, None, SETTINGS)] # called once each self.assertListEqual(self._storage.get_class.call_args_list, expected) def test_nodelist_no_caching(self): diff --git a/reclass/storage/tests/test_yamldata.py b/reclass/storage/tests/test_yamldata.py new file mode 100644 index 00000000..5c48db60 --- /dev/null +++ b/reclass/storage/tests/test_yamldata.py @@ -0,0 +1,41 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass (http://github.com/madduck/reclass) +# +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from reclass.storage.yamldata import YamlData + +import unittest + +class TestYamlData(unittest.TestCase): + + def setUp(self): + lines = [ 'classes:', + ' - testdir.test1', + ' - testdir.test2', + ' - test3', + '', + 'environment: base', + '', + 'parameters:', + ' _TEST_:', + ' alpha: 1', + ' beta: two' ] + self.data = '\n'.join(lines) + self.yamldict = { 'classes': [ 'testdir.test1', 'testdir.test2', 'test3' ], + 'environment': 'base', + 'parameters': { '_TEST_': { 'alpha': 1, 'beta': 'two' } } + } + + def test_yaml_from_string(self): + res = YamlData.from_string(self.data, 'testpath') + self.assertEqual(res.uri, 'testpath') + self.assertEqual(res.get_data(), self.yamldict) + +if __name__ == '__main__': + unittest.main() diff --git a/reclass/storage/yaml_fs/__init__.py b/reclass/storage/yaml_fs/__init__.py index 5a13050e..3577b362 100644 --- a/reclass/storage/yaml_fs/__init__.py +++ b/reclass/storage/yaml_fs/__init__.py @@ -6,49 +6,63 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + import os, sys -import fnmatch -from reclass.storage import NodeStorageBase -from yamlfile import YamlFile -from directory import Directory +import yaml +from reclass.output.yaml_outputter import ExplicitDumper +from reclass.storage import ExternalNodeStorageBase +from reclass.storage.yamldata import YamlData +from .directory import Directory from reclass.datatypes import Entity import reclass.errors -FILE_EXTENSION = '.yml' +FILE_EXTENSION = ('.yml', '.yaml') STORAGE_NAME = 'yaml_fs' def vvv(msg): - #print >>sys.stderr, msg + #print(msg, file=sys.stderr) pass -class ExternalNodeStorage(NodeStorageBase): - - def __init__(self, nodes_uri, classes_uri, default_environment=None): - super(ExternalNodeStorage, self).__init__(STORAGE_NAME) - - def name_mangler(relpath, name): - # nodes are identified just by their basename, so - # no mangling required - return relpath, name - self._nodes_uri = nodes_uri - self._nodes = self._enumerate_inventory(nodes_uri, name_mangler) - - def name_mangler(relpath, name): - if relpath == '.': - # './' is converted to None - return None, name - parts = relpath.split(os.path.sep) - if name != 'init': - # "init" is the directory index, so only append the basename - # to the path parts for all other filenames. 
This has the - # effect that data in file "foo/init.yml" will be registered - # as data for class "foo", not "foo.init" - parts.append(name) - return relpath, '.'.join(parts) - self._classes_uri = classes_uri - self._classes = self._enumerate_inventory(classes_uri, name_mangler) - - self._default_environment = default_environment +def path_mangler(inventory_base_uri, nodes_uri, classes_uri): + + if inventory_base_uri is None: + # if inventory_base is not given, default to current directory + inventory_base_uri = os.getcwd() + + nodes_uri = nodes_uri or 'nodes' + classes_uri = classes_uri or 'classes' + + def _path_mangler_inner(path): + ret = os.path.join(inventory_base_uri, path) + ret = os.path.expanduser(ret) + return os.path.abspath(ret) + + n, c = map(_path_mangler_inner, (nodes_uri, classes_uri)) + if n == c: + raise errors.DuplicateUriError(n, c) + common = os.path.commonprefix((n, c)) + if common == n or common == c: + raise errors.UriOverlapError(n, c) + + return n, c + + +class ExternalNodeStorage(ExternalNodeStorageBase): + + def __init__(self, nodes_uri, classes_uri, compose_node_name): + super(ExternalNodeStorage, self).__init__(STORAGE_NAME, compose_node_name) + + if nodes_uri is not None: + self._nodes_uri = nodes_uri + self._nodes = self._enumerate_inventory(nodes_uri, self.node_name_mangler) + + if classes_uri is not None: + self._classes_uri = classes_uri + self._classes = self._enumerate_inventory(classes_uri, self.class_name_mangler) nodes_uri = property(lambda self: self._nodes_uri) classes_uri = property(lambda self: self._classes_uri) @@ -56,7 +70,7 @@ def name_mangler(relpath, name): def _enumerate_inventory(self, basedir, name_mangler): ret = {} def register_fn(dirpath, filenames): - filenames = fnmatch.filter(filenames, '*{0}'.format(FILE_EXTENSION)) + filenames = [f for f in filenames if f.endswith(FILE_EXTENSION)] vvv('REGISTER {0} in path {1}'.format(filenames, dirpath)) for f in filenames: name = os.path.splitext(f)[0] @@ -76,24 +90,23 
@@ def register_fn(dirpath, filenames): d.walk(register_fn) return ret - def get_node(self, name): + def get_node(self, name, settings): vvv('GET NODE {0}'.format(name)) try: relpath = self._nodes[name] path = os.path.join(self.nodes_uri, relpath) - name = os.path.splitext(relpath)[0] - except KeyError, e: + except KeyError as e: raise reclass.errors.NodeNotFound(self.name, name, self.nodes_uri) - entity = YamlFile(path).get_entity(name, self._default_environment) + entity = YamlData.from_file(path).get_entity(name, settings) return entity - def get_class(self, name, nodename=None): + def get_class(self, name, environment, settings): vvv('GET CLASS {0}'.format(name)) try: path = os.path.join(self.classes_uri, self._classes[name]) - except KeyError, e: + except KeyError as e: raise reclass.errors.ClassNotFound(self.name, name, self.classes_uri) - entity = YamlFile(path).get_entity(name) + entity = YamlData.from_file(path).get_entity(name, settings) return entity def enumerate_nodes(self): diff --git a/reclass/storage/yaml_fs/directory.py b/reclass/storage/yaml_fs/directory.py index 03302b73..4e11643d 100644 --- a/reclass/storage/yaml_fs/directory.py +++ b/reclass/storage/yaml_fs/directory.py @@ -6,17 +6,22 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + import os -import sys from reclass.errors import NotFoundError -SKIPDIRS = ( 'CVS', 'SCCS' ) -FILE_EXTENSION = '.yml' +SKIPDIRS = ('CVS', 'SCCS') +FILE_EXTENSION = ('.yml', '.yaml') def vvv(msg): - #print >>sys.stderr, msg + #print(msg, file=sys.stderr) pass + class Directory(object): def __init__(self, path, fileclass=None): @@ -39,7 +44,8 @@ def _register_files(self, dirpath, filenames): files = property(lambda self: self._files) def walk(self, register_fn=None): - if not callable(register_fn): register_fn = self._register_files + if not callable(register_fn): + register_fn = self._register_files def _error(exc): raise(exc) diff --git a/reclass/storage/yaml_fs/yamlfile.py b/reclass/storage/yaml_fs/yamlfile.py deleted file mode 100644 index 717a9117..00000000 --- a/reclass/storage/yaml_fs/yamlfile.py +++ /dev/null @@ -1,61 +0,0 @@ -# -# -*- coding: utf-8 -*- -# -# This file is part of reclass (http://github.com/madduck/reclass) -# -# Copyright © 2007–14 martin f. 
krafft -# Released under the terms of the Artistic Licence 2.0 -# -from reclass import datatypes -import yaml -import os -from reclass.errors import NotFoundError - -class YamlFile(object): - - def __init__(self, path): - ''' Initialise a yamlfile object ''' - if not os.path.isfile(path): - raise NotFoundError('No such file: %s' % path) - if not os.access(path, os.R_OK): - raise NotFoundError('Cannot open: %s' % path) - self._path = path - self._data = dict() - self._read() - path = property(lambda self: self._path) - - def _read(self): - fp = file(self._path) - data = yaml.safe_load(fp) - if data is not None: - self._data = data - fp.close() - - def get_entity(self, name=None, default_environment=None): - classes = self._data.get('classes') - if classes is None: - classes = [] - classes = datatypes.Classes(classes) - - applications = self._data.get('applications') - if applications is None: - applications = [] - applications = datatypes.Applications(applications) - - parameters = self._data.get('parameters') - if parameters is None: - parameters = {} - parameters = datatypes.Parameters(parameters) - - env = self._data.get('environment', default_environment) - - if name is None: - name = self._path - - return datatypes.Entity(classes, applications, parameters, - name=name, environment=env, - uri='yaml_fs://{0}'.format(self._path)) - - def __repr__(self): - return '<{0} {1}, {2}>'.format(self.__class__.__name__, self._path, - self._data.keys()) diff --git a/reclass/storage/yaml_git/__init__.py b/reclass/storage/yaml_git/__init__.py new file mode 100644 index 00000000..06d839b4 --- /dev/null +++ b/reclass/storage/yaml_git/__init__.py @@ -0,0 +1,316 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import collections +import distutils.version +import errno +import fcntl +import os +import time + +# 
Squelch warning on centos7 due to upgrading cffi +# see https://github.com/saltstack/salt/pull/39871 +import warnings +with warnings.catch_warnings(): + warnings.simplefilter('ignore') + try: + # NOTE: in some distros pygit2 could require special effort to acquire. + # It is not a problem per se, but it breaks tests for no real reason. + # This try block is for keeping tests sane. + import pygit2 + except ImportError: + pygit2 = None + +from six import iteritems + +import reclass.errors +from reclass.storage import ExternalNodeStorageBase +from reclass.storage.yamldata import YamlData + +FILE_EXTENSION = ('.yml', '.yaml') +STORAGE_NAME = 'yaml_git' + +def path_mangler(inventory_base_uri, nodes_uri, classes_uri): + if nodes_uri == classes_uri: + raise errors.DuplicateUriError(nodes_uri, classes_uri) + return nodes_uri, classes_uri + + +GitMD = collections.namedtuple('GitMD', ['name', 'path', 'id'], verbose=False, rename=False) + + +class GitURI(object): + + def __init__(self, dictionary): + self.repo = None + self.branch = None + self.root = None + self.cache_dir = None + self.lock_dir = None + self.pubkey = None + self.privkey = None + self.password = None + self.update(dictionary) + + def update(self, dictionary): + if 'repo' in dictionary: self.repo = dictionary['repo'] + if 'branch' in dictionary: self.branch = dictionary['branch'] + if 'cache_dir' in dictionary: self.cache_dir = dictionary['cache_dir'] + if 'lock_dir' in dictionary: self.lock_dir = dictionary['lock_dir'] + if 'pubkey' in dictionary: self.pubkey = dictionary['pubkey'] + if 'privkey' in dictionary: self.privkey = dictionary['privkey'] + if 'password' in dictionary: self.password = dictionary['password'] + if 'root' in dictionary: + if dictionary['root'] is None: + self.root = None + else: + self.root = dictionary['root'].replace('/', '.') + + def __repr__(self): + return '<{0}: {1} {2} {3}>'.format(self.__class__.__name__, self.repo, self.branch, self.root) + + +class LockFile(): + def 
__init__(self, file): + self._file = file + + def __enter__(self): + self._fd = open(self._file, 'w+') + start = time.time() + while True: + if (time.time() - start) > 120: + raise IOError('Timeout waiting to lock file: {0}'.format(self._file)) + try: + fcntl.flock(self._fd, fcntl.LOCK_EX | fcntl.LOCK_NB) + break + except IOError as e: + # raise on unrelated IOErrors + if e.errno != errno.EAGAIN: + raise + else: + time.sleep(0.1) + + def __exit__(self, type, value, traceback): + self._fd.close() + + +class GitRepo(object): + def __init__(self, uri, node_name_mangler, class_name_mangler): + if pygit2 is None: + raise errors.MissingModuleError('pygit2') + self.transport, _, self.url = uri.repo.partition('://') + self.name = self.url.replace('/', '_') + self.credentials = None + self.remotecallbacks = None + if uri.cache_dir is None: + self.cache_dir = '{0}/{1}/{2}'.format(os.path.expanduser("~"), '.reclass/cache/git', self.name) + else: + self.cache_dir = '{0}/{1}'.format(uri.cache_dir, self.name) + if uri.lock_dir is None: + self.lock_file = '{0}/{1}/{2}'.format(os.path.expanduser("~"), '.reclass/cache/lock', self.name) + else: + self.lock_file = '{0}/{1}'.format(uri.lock_dir, self.name) + lock_dir = os.path.dirname(self.lock_file) + if not os.path.exists(lock_dir): + os.makedirs(lock_dir) + self._node_name_mangler = node_name_mangler + self._class_name_mangler = class_name_mangler + with LockFile(self.lock_file): + self._init_repo(uri) + self._fetch() + self.branches = self.repo.listall_branches() + self.files = self.files_in_repo() + + def _init_repo(self, uri): + if os.path.exists(self.cache_dir): + self.repo = pygit2.Repository(self.cache_dir) + else: + os.makedirs(self.cache_dir) + self.repo = pygit2.init_repository(self.cache_dir, bare=True) + self.repo.create_remote('origin', self.url) + if 'ssh' in self.transport: + if '@' in self.url: + user, _, _ = self.url.partition('@') + else: + user = 'gitlab' + + if uri.pubkey is not None: + creds = 
pygit2.Keypair(user, uri.pubkey, uri.privkey, uri.password) + else: + creds = pygit2.KeypairFromAgent(user) + + pygit2_version = pygit2.__version__ + if distutils.version.LooseVersion(pygit2_version) >= distutils.version.LooseVersion('0.23.2'): + self.remotecallbacks = pygit2.RemoteCallbacks(credentials=creds) + self.credentials = None + else: + self.remotecallbacks = None + self.credentials = creds + + def _fetch(self): + origin = self.repo.remotes[0] + fetch_kwargs = {} + if self.remotecallbacks is not None: + fetch_kwargs['callbacks'] = self.remotecallbacks + if self.credentials is not None: + origin.credentials = self.credentials + fetch_results = origin.fetch(**fetch_kwargs) + remote_branches = self.repo.listall_branches(pygit2.GIT_BRANCH_REMOTE) + local_branches = self.repo.listall_branches() + for remote_branch_name in remote_branches: + _, _, local_branch_name = remote_branch_name.partition('/') + remote_branch = self.repo.lookup_branch(remote_branch_name, pygit2.GIT_BRANCH_REMOTE) + if local_branch_name not in local_branches: + local_branch = self.repo.create_branch(local_branch_name, self.repo[remote_branch.target.hex]) + local_branch.upstream = remote_branch + else: + local_branch = self.repo.lookup_branch(local_branch_name) + if local_branch.target != remote_branch.target: + local_branch.set_target(remote_branch.target) + + local_branches = self.repo.listall_branches() + for local_branch_name in local_branches: + remote_branch_name = '{0}/{1}'.format(origin.name, local_branch_name) + if remote_branch_name not in remote_branches: + local_branch = self.repo.lookup_branch(local_branch_name) + local.branch.delete() + + def get(self, id): + return self.repo.get(id) + + def files_in_tree(self, tree, path): + files = [] + for entry in tree: + if entry.filemode == pygit2.GIT_FILEMODE_TREE: + subtree = self.repo.get(entry.id) + if path == '': + subpath = entry.name + else: + subpath = '/'.join([path, entry.name]) + files.extend(self.files_in_tree(subtree, 
subpath)) + else: + if path == '': + relpath = entry.name + else: + relpath = '/'.join([path, entry.name]) + files.append(GitMD(entry.name, relpath, entry.id)) + return files + + def files_in_branch(self, branch): + tree = self.repo.revparse_single(branch).tree + return self.files_in_tree(tree, '') + + def files_in_repo(self): + ret = {} + for bname in self.branches: + branch = {} + files = self.files_in_branch(bname) + for file in files: + if file.name.endswith(FILE_EXTENSION): + name = os.path.splitext(file.name)[0] + relpath = os.path.dirname(file.path) + if callable(self._class_name_mangler): + relpath, name = self._class_name_mangler(relpath, name) + if name in ret: + raise reclass.errors.DuplicateNodeNameError(self.name + ' - ' + bname, name, ret[name], path) + else: + branch[name] = file + ret[bname] = branch + return ret + + def nodes(self, branch, subdir): + ret = {} + for (name, file) in iteritems(self.files[branch]): + if subdir is None or name.startswith(subdir): + node_name = os.path.splitext(file.name)[0] + relpath = os.path.dirname(file.path) + if callable(self._node_name_mangler): + relpath, node_name = self._node_name_mangler(relpath, node_name) + if node_name in ret: + raise reclass.errors.DuplicateNodeNameError(self.name, name, files[name], path) + else: + ret[node_name] = file + return ret + + +class ExternalNodeStorage(ExternalNodeStorageBase): + def __init__(self, nodes_uri, classes_uri, compose_node_name): + super(ExternalNodeStorage, self).__init__(STORAGE_NAME, compose_node_name) + self._repos = dict() + + if nodes_uri is not None: + self._nodes_uri = GitURI({ 'branch': 'master' }) + self._nodes_uri.update(nodes_uri) + self._load_repo(self._nodes_uri) + self._nodes = self._repos[self._nodes_uri.repo].nodes(self._nodes_uri.branch, self._nodes_uri.root) + + if classes_uri is not None: + self._classes_default_uri = GitURI({ 'branch': '__env__' }) + self._classes_default_uri.update(classes_uri) + self._load_repo(self._classes_default_uri) + + 
self._classes_uri = [] + if 'env_overrides' in classes_uri: + for override in classes_uri['env_overrides']: + for (env, options) in iteritems(override): + uri = GitURI(self._classes_default_uri) + uri.update({ 'branch': env }) + uri.update(options) + self._classes_uri.append((env, uri)) + self._load_repo(uri) + + self._classes_uri.append(('*', self._classes_default_uri)) + + nodes_uri = property(lambda self: self._nodes_uri) + classes_uri = property(lambda self: self._classes_uri) + + def get_node(self, name, settings): + file = self._nodes[name] + blob = self._repos[self._nodes_uri.repo].get(file.id) + entity = YamlData.from_string(blob.data, 'git_fs://{0} {1} {2}'.format(self._nodes_uri.repo, self._nodes_uri.branch, file.path)).get_entity(name, settings) + return entity + + def get_class(self, name, environment, settings): + uri = self._env_to_uri(environment) + if uri.root is not None: + name = '{0}.{1}'.format(uri.root, name) + if uri.repo not in self._repos: + raise reclass.errors.NotFoundError("Repo " + uri.repo + " unknown or missing") + if uri.branch not in self._repos[uri.repo].files: + raise reclass.errors.NotFoundError("Branch " + uri.branch + " missing from " + uri.repo) + if name not in self._repos[uri.repo].files[uri.branch]: + raise reclass.errors.NotFoundError("File " + name + " missing from " + uri.repo + " branch " + uri.branch) + file = self._repos[uri.repo].files[uri.branch][name] + blob = self._repos[uri.repo].get(file.id) + entity = YamlData.from_string(blob.data, 'git_fs://{0} {1} {2}'.format(uri.repo, uri.branch, file.path)).get_entity(name, settings) + return entity + + def enumerate_nodes(self): + return self._nodes.keys() + + def _load_repo(self, uri): + if uri.repo not in self._repos: + self._repos[uri.repo] = GitRepo(uri, self.node_name_mangler, self.class_name_mangler) + + def _env_to_uri(self, environment): + ret = None + if environment is None: + ret = self._classes_default_uri + else: + for env, uri in self._classes_uri: + if env == 
environment: + ret = uri + break + if ret is None: + ret = self._classes_default_uri + if ret.branch == '__env__': + ret.branch = environment + if ret.branch == None: + ret.branch = 'master' + return ret diff --git a/reclass/storage/yamldata.py b/reclass/storage/yamldata.py new file mode 100644 index 00000000..a38b589a --- /dev/null +++ b/reclass/storage/yamldata.py @@ -0,0 +1,119 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass (http://github.com/madduck/reclass) +# +# Copyright © 2007–14 martin f. krafft +# Released under the terms of the Artistic Licence 2.0 +# +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from reclass import datatypes +import yaml +import os +from reclass.errors import NotFoundError + +_SafeLoader = yaml.CSafeLoader if yaml.__with_libyaml__ else yaml.SafeLoader + +class YamlData(object): + + @classmethod + def from_file(cls, path): + ''' Initialise yaml data from a local file ''' + abs_path = os.path.abspath(path) + if not os.path.isfile(abs_path): + raise NotFoundError('No such file: %s' % abs_path) + if not os.access(abs_path, os.R_OK): + raise NotFoundError('Cannot open: %s' % abs_path) + y = cls('yaml_fs://{0}'.format(abs_path)) + with open(abs_path) as fp: + data = yaml.load(fp, Loader=_SafeLoader) + if data is not None: + y._data = data + return y + + @classmethod + def from_string(cls, string, uri): + ''' Initialise yaml data from a string ''' + y = cls(uri) + data = yaml.load(string, Loader=_SafeLoader) + if data is not None: + y._data = data + return y + + def __init__(self, uri): + self._uri = uri + self._data = dict() + + uri = property(lambda self: self._uri) + + def get_data(self): + return self._data + + def set_absolute_names(self, name, names): + new_names = [] + for n in names: + if n[0] == '.': + dots = self.count_dots(n) + levels_up = (dots * (-1)) + parent = '.'.join(name.split('.')[0:levels_up]) + 
if parent == '': + n = n[dots:] + else: + n = parent + n[dots - 1:] + new_names.append(n) + return new_names + + def yield_dots(self, value): + try: + idx = value.index('.') + except ValueError: + return + if idx == 0: + yield '.' + for dot in self.yield_dots(value[1:]): + yield dot + + def count_dots(self, value): + return len(list(self.yield_dots(value))) + + def get_entity(self, name, settings): + #if name is None: + # name = self._uri + + classes = self._data.get('classes') + if classes is None: + classes = [] + classes = self.set_absolute_names(name, classes) + classes = datatypes.Classes(classes) + + applications = self._data.get('applications') + if applications is None: + applications = [] + applications = datatypes.Applications(applications) + + parameters = self._data.get('parameters') + if parameters is None: + parameters = {} + parameters = datatypes.Parameters(parameters, settings, self._uri) + + exports = self._data.get('exports') + if exports is None: + exports = {} + exports = datatypes.Exports(exports, settings, self._uri) + + env = self._data.get('environment', None) + + return datatypes.Entity(settings, classes=classes, applications=applications, parameters=parameters, + exports=exports, name=name, environment=env, uri=self.uri) + + def __str__(self): + return '<{0} {1}, {2}>'.format(self.__class__.__name__, self._uri, + self._data) + + def __repr__(self): + return '<{0} {1}, {2}>'.format(self.__class__.__name__, self._uri, + self._data.keys()) diff --git a/reclass/tests/__init__.py b/reclass/tests/__init__.py new file mode 100644 index 00000000..9aaaf25a --- /dev/null +++ b/reclass/tests/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals diff --git a/reclass/tests/data/01/classes/standard.yml b/reclass/tests/data/01/classes/standard.yml new file mode 100644 index 00000000..13bce54a --- /dev/null 
+++ b/reclass/tests/data/01/classes/standard.yml @@ -0,0 +1,4 @@ +parameters: + int: 1 + string: '1' + bool: True diff --git a/reclass/tests/data/01/nodes/class_notfound.yml b/reclass/tests/data/01/nodes/class_notfound.yml new file mode 100644 index 00000000..616a49bf --- /dev/null +++ b/reclass/tests/data/01/nodes/class_notfound.yml @@ -0,0 +1,5 @@ +classes: + - missing + +parameters: + node_test: class not found diff --git a/reclass/tests/data/01/nodes/data_types.yml b/reclass/tests/data/01/nodes/data_types.yml new file mode 100644 index 00000000..28ff151b --- /dev/null +++ b/reclass/tests/data/01/nodes/data_types.yml @@ -0,0 +1,2 @@ +classes: + - standard diff --git a/reclass/tests/data/02/classes/four.yml b/reclass/tests/data/02/classes/four.yml new file mode 100644 index 00000000..1f9873cb --- /dev/null +++ b/reclass/tests/data/02/classes/four.yml @@ -0,0 +1,2 @@ +parameters: + four_alpha: 3 diff --git a/reclass/tests/data/02/classes/init.yml b/reclass/tests/data/02/classes/init.yml new file mode 100644 index 00000000..e40b8992 --- /dev/null +++ b/reclass/tests/data/02/classes/init.yml @@ -0,0 +1,2 @@ +parameters: + alpha_init: 5 \ No newline at end of file diff --git a/reclass/tests/data/02/classes/one/alpha.yml b/reclass/tests/data/02/classes/one/alpha.yml new file mode 100644 index 00000000..9454cd04 --- /dev/null +++ b/reclass/tests/data/02/classes/one/alpha.yml @@ -0,0 +1,13 @@ +classes: +- .beta +- two.beta +- ..four +- ..two.gamma +- ..init + +parameters: + test1: ${one_beta} + test2: ${two_beta} + test3: ${four_alpha} + test4: ${two_gamma} + test5: ${alpha_init} diff --git a/reclass/tests/data/02/classes/one/beta.yml b/reclass/tests/data/02/classes/one/beta.yml new file mode 100644 index 00000000..f754252a --- /dev/null +++ b/reclass/tests/data/02/classes/one/beta.yml @@ -0,0 +1,2 @@ +parameters: + one_beta: 1 diff --git a/reclass/tests/data/02/classes/three.yml b/reclass/tests/data/02/classes/three.yml new file mode 100644 index 00000000..987fde06 --- 
/dev/null +++ b/reclass/tests/data/02/classes/three.yml @@ -0,0 +1,2 @@ +classes: +- .one.alpha diff --git a/reclass/tests/data/02/classes/two/beta.yml b/reclass/tests/data/02/classes/two/beta.yml new file mode 100644 index 00000000..1f578b2f --- /dev/null +++ b/reclass/tests/data/02/classes/two/beta.yml @@ -0,0 +1,2 @@ +parameters: + two_beta: 2 diff --git a/reclass/tests/data/02/classes/two/gamma.yml b/reclass/tests/data/02/classes/two/gamma.yml new file mode 100644 index 00000000..a1d71da2 --- /dev/null +++ b/reclass/tests/data/02/classes/two/gamma.yml @@ -0,0 +1,2 @@ +parameters: + two_gamma: 4 diff --git a/reclass/tests/data/02/nodes/relative.yml b/reclass/tests/data/02/nodes/relative.yml new file mode 100644 index 00000000..1f2bbdc7 --- /dev/null +++ b/reclass/tests/data/02/nodes/relative.yml @@ -0,0 +1,2 @@ +classes: + - one.alpha diff --git a/reclass/tests/data/02/nodes/top_relative.yml b/reclass/tests/data/02/nodes/top_relative.yml new file mode 100644 index 00000000..5dae5beb --- /dev/null +++ b/reclass/tests/data/02/nodes/top_relative.yml @@ -0,0 +1,2 @@ +classes: + - three diff --git a/reclass/tests/data/03/classes/a.yml b/reclass/tests/data/03/classes/a.yml new file mode 100644 index 00000000..748a2974 --- /dev/null +++ b/reclass/tests/data/03/classes/a.yml @@ -0,0 +1,6 @@ +parameters: + a: 1 + alpha: + - ${a} + beta: + a: ${a} diff --git a/reclass/tests/data/03/classes/b.yml b/reclass/tests/data/03/classes/b.yml new file mode 100644 index 00000000..cce2609e --- /dev/null +++ b/reclass/tests/data/03/classes/b.yml @@ -0,0 +1,6 @@ +parameters: + b: 2 + alpha: + - ${b} + beta: + b: ${b} diff --git a/reclass/tests/data/03/classes/c.yml b/reclass/tests/data/03/classes/c.yml new file mode 100644 index 00000000..7441417e --- /dev/null +++ b/reclass/tests/data/03/classes/c.yml @@ -0,0 +1,6 @@ +parameters: + c: 3 + alpha: + - ${c} + beta: + c: ${c} diff --git a/reclass/tests/data/03/classes/d.yml b/reclass/tests/data/03/classes/d.yml new file mode 100644 index 
00000000..e61a1ff0 --- /dev/null +++ b/reclass/tests/data/03/classes/d.yml @@ -0,0 +1,6 @@ +parameters: + d: 4 + alpha: + - ${d} + beta: + d: ${d} diff --git a/reclass/tests/data/03/nodes/alpha/one.yml b/reclass/tests/data/03/nodes/alpha/one.yml new file mode 100644 index 00000000..f2b613d0 --- /dev/null +++ b/reclass/tests/data/03/nodes/alpha/one.yml @@ -0,0 +1,3 @@ +classes: +- a +- b diff --git a/reclass/tests/data/03/nodes/alpha/two.yml b/reclass/tests/data/03/nodes/alpha/two.yml new file mode 100644 index 00000000..b020af36 --- /dev/null +++ b/reclass/tests/data/03/nodes/alpha/two.yml @@ -0,0 +1,3 @@ +classes: +- a +- c diff --git a/reclass/tests/data/03/nodes/beta/one.yml b/reclass/tests/data/03/nodes/beta/one.yml new file mode 100644 index 00000000..168a4fb8 --- /dev/null +++ b/reclass/tests/data/03/nodes/beta/one.yml @@ -0,0 +1,3 @@ +classes: +- b +- c diff --git a/reclass/tests/data/03/nodes/beta/two.yml b/reclass/tests/data/03/nodes/beta/two.yml new file mode 100644 index 00000000..56c63433 --- /dev/null +++ b/reclass/tests/data/03/nodes/beta/two.yml @@ -0,0 +1,3 @@ +classes: +- c +- d diff --git a/reclass/tests/test_core.py b/reclass/tests/test_core.py new file mode 100644 index 00000000..4827177b --- /dev/null +++ b/reclass/tests/test_core.py @@ -0,0 +1,91 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass (http://github.com/madduck/reclass) +# +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import os + +from reclass import get_storage, get_path_mangler +from reclass.core import Core +from reclass.settings import Settings +from reclass.errors import ClassNotFound + +import unittest +try: + import unittest.mock as mock +except ImportError: + import mock + +class TestCore(unittest.TestCase): + + def _core(self, dataset, opts={}): + inventory_uri = os.path.dirname(os.path.abspath(__file__)) + '/data/' + dataset + path_mangler = 
get_path_mangler('yaml_fs') + nodes_uri, classes_uri = path_mangler(inventory_uri, 'nodes', 'classes') + settings = Settings(opts) + storage = get_storage('yaml_fs', nodes_uri, classes_uri, settings.compose_node_name) + return Core(storage, None, settings) + + def test_type_conversion(self): + reclass = self._core('01') + node = reclass.nodeinfo('data_types') + params = { 'int': 1, 'bool': True, 'string': '1', '_reclass_': { 'environment': 'base', 'name': {'full': 'data_types', 'short': 'data_types' } } } + self.assertEqual(node['parameters'], params) + + def test_raise_class_notfound(self): + reclass = self._core('01') + with self.assertRaises(ClassNotFound): + node = reclass.nodeinfo('class_notfound') + + def test_ignore_class_notfound(self): + reclass = self._core('01', opts={ 'ignore_class_notfound': True, 'ignore_class_notfound_warning': False }) + node = reclass.nodeinfo('class_notfound') + params = { 'node_test': 'class not found', '_reclass_': { 'environment': 'base', 'name': {'full': 'class_notfound', 'short': 'class_notfound' } } } + self.assertEqual(node['parameters'], params) + + def test_raise_class_notfound_with_regexp(self): + reclass = self._core('01', opts={ 'ignore_class_notfound': True, 'ignore_class_notfound_warning': False, 'ignore_class_notfound_regexp': 'notmatched.*' }) + with self.assertRaises(ClassNotFound): + node = reclass.nodeinfo('class_notfound') + + def test_ignore_class_notfound_with_regexp(self): + reclass = self._core('01', opts={ 'ignore_class_notfound': True, 'ignore_class_notfound_warning': False, 'ignore_class_notfound_regexp': 'miss.*' }) + node = reclass.nodeinfo('class_notfound') + params = { 'node_test': 'class not found', '_reclass_': { 'environment': 'base', 'name': {'full': 'class_notfound', 'short': 'class_notfound' } } } + self.assertEqual(node['parameters'], params) + + def test_relative_class_names(self): + reclass = self._core('02') + node = reclass.nodeinfo('relative') + params = { 'test1': 1, 'test2': 2, 'test3': 
3, 'test4': 4, 'test5': 5, 'one_beta': 1, 'two_beta': 2, 'four_alpha': 3, 'two_gamma': 4, 'alpha_init': 5, '_reclass_': { 'environment': 'base', 'name': { 'full': 'relative', 'short': 'relative' } } } + self.assertEqual(node['parameters'], params) + + def test_top_relative_class_names(self): + reclass = self._core('02') + node = reclass.nodeinfo('top_relative') + params = { 'test1': 1, 'test2': 2, 'test3': 3, 'test4': 4, 'test5': 5, 'one_beta': 1, 'two_beta': 2, 'four_alpha': 3, 'two_gamma': 4, 'alpha_init': 5, '_reclass_': { 'environment': 'base', 'name': { 'full': 'top_relative', 'short': 'top_relative' } } } + self.assertEqual(node['parameters'], params) + + def test_compose_node_names(self): + reclass = self._core('03', {'compose_node_name': True}) + alpha_one_node = reclass.nodeinfo('alpha.one') + alpha_one_res = {'a': 1, 'alpha': [1, 2], 'beta': {'a': 1, 'b': 2}, 'b': 2, '_reclass_': {'environment': 'base', 'name': {'full': 'alpha.one', 'short': 'alpha'}}} + alpha_two_node = reclass.nodeinfo('alpha.two') + alpha_two_res = {'a': 1, 'alpha': [1, 3], 'beta': {'a': 1, 'c': 3}, 'c': 3, '_reclass_': {'environment': 'base', 'name': {'full': 'alpha.two', 'short': 'alpha'}}} + beta_one_node = reclass.nodeinfo('beta.one') + beta_one_res = {'alpha': [2, 3], 'beta': {'c': 3, 'b': 2}, 'b': 2, 'c': 3, '_reclass_': {'environment': 'base', 'name': {'full': 'beta.one', 'short': 'beta'}}} + beta_two_node = reclass.nodeinfo('beta.two') + beta_two_res = {'alpha': [3, 4], 'c': 3, 'beta': {'c': 3, 'd': 4}, 'd': 4, '_reclass_': {'environment': u'base', 'name': {'full': u'beta.two', 'short': u'beta'}}} + self.assertEqual(alpha_one_node['parameters'], alpha_one_res) + self.assertEqual(alpha_two_node['parameters'], alpha_two_res) + self.assertEqual(beta_one_node['parameters'], beta_one_res) + self.assertEqual(beta_two_node['parameters'], beta_two_res) + + +if __name__ == '__main__': + unittest.main() diff --git a/reclass/utils/__init__.py b/reclass/utils/__init__.py index 
e69de29b..9aaaf25a 100644 --- a/reclass/utils/__init__.py +++ b/reclass/utils/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals diff --git a/reclass/utils/dictpath.py b/reclass/utils/dictpath.py index db95e664..70c7bb51 100644 --- a/reclass/utils/dictpath.py +++ b/reclass/utils/dictpath.py @@ -6,8 +6,13 @@ # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals -import types, re +import six +import re class DictPath(object): ''' @@ -56,17 +61,17 @@ class DictPath(object): def __init__(self, delim, contents=None): self._delim = delim + if contents is None: self._parts = [] + elif isinstance(contents, list): + self._parts = contents + elif isinstance(contents, six.string_types): + self._parts = self._split_string(contents) + elif isinstance(contents, tuple): + self._parts = list(contents) else: - if isinstance(contents, types.StringTypes): - self._parts = self._split_string(contents) - elif isinstance(contents, tuple): - self._parts = list(contents) - elif isinstance(contents, list): - self._parts = contents - else: - raise TypeError('DictPath() takes string or list, '\ + raise TypeError('DictPath() takes string or list, '\ 'not %s' % type(contents)) def __repr__(self): @@ -76,11 +81,12 @@ def __str__(self): return self._delim.join(str(i) for i in self._parts) def __eq__(self, other): - if isinstance(other, types.StringTypes): + if not (isinstance(other, six.string_types) or + isinstance(other, self.__class__)): + return False + if isinstance(other, six.string_types): other = DictPath(self._delim, other) - - return self._parts == other._parts \ - and self._delim == other._delim + return self._parts == other._parts and 
self._delim == other._delim def __ne__(self, other): return not self.__eq__(other) @@ -88,9 +94,9 @@ def __ne__(self, other): def __hash__(self): return hash(str(self)) - def _get_path(self): + @property + def path(self): return self._parts - path = property(_get_path) def _get_key(self): if len(self._parts) == 0: @@ -109,17 +115,55 @@ def _get_innermost_container(self, base): def _split_string(self, string): return re.split(r'(? -# Released under the terms of the Artistic Licence 2.0 -# - -import re - -from reclass.utils.dictpath import DictPath -from reclass.defaults import PARAMETER_INTERPOLATION_SENTINELS, \ - PARAMETER_INTERPOLATION_DELIMITER -from reclass.errors import IncompleteInterpolationError, \ - UndefinedVariableError - -_SENTINELS = [re.escape(s) for s in PARAMETER_INTERPOLATION_SENTINELS] -_RE = '{0}\s*(.+?)\s*{1}'.format(*_SENTINELS) - -class RefValue(object): - ''' - Isolates references in string values - - RefValue can be used to isolate and eventually expand references to other - parameters in strings. Those references can then be iterated and rendered - in the context of a dictionary to resolve those references. - - RefValue always gets constructed from a string, because templating - — essentially this is what's going on — is necessarily always about - strings. Therefore, generally, the rendered value of a RefValue instance - will also be a string. - - Nevertheless, as this might not be desirable, RefValue will return the - referenced variable without casting it to a string, if the templated - string contains nothing but the reference itself. 
- - For instance: - - mydict = {'favcolour': 'yellow', 'answer': 42, 'list': [1,2,3]} - RefValue('My favourite colour is ${favolour}').render(mydict) - → 'My favourite colour is yellow' # a string - - RefValue('The answer is ${answer}').render(mydict) - → 'The answer is 42' # a string - - RefValue('${answer}').render(mydict) - → 42 # an int - - RefValue('${list}').render(mydict) - → [1,2,3] # an list - - The markers used to identify references are set in reclass.defaults, as is - the default delimiter. - ''' - - INTERPOLATION_RE = re.compile(_RE) - - def __init__(self, string, delim=PARAMETER_INTERPOLATION_DELIMITER): - self._strings = [] - self._refs = [] - self._delim = delim - self._parse(string) - - def _parse(self, string): - parts = RefValue.INTERPOLATION_RE.split(string) - self._refs = parts[1:][::2] - self._strings = parts[0:][::2] - self._check_strings(string) - - def _check_strings(self, orig): - for s in self._strings: - pos = s.find(PARAMETER_INTERPOLATION_SENTINELS[0]) - if pos >= 0: - raise IncompleteInterpolationError(orig, - PARAMETER_INTERPOLATION_SENTINELS[1]) - - def _resolve(self, ref, context): - path = DictPath(self._delim, ref) - try: - return path.get_value(context) - except KeyError as e: - raise UndefinedVariableError(ref) - - def has_references(self): - return len(self._refs) > 0 - - def get_references(self): - return self._refs - - def _assemble(self, resolver): - if not self.has_references(): - return self._strings[0] - - if self._strings == ['', '']: - # preserve the type of the referenced variable - return resolver(self._refs[0]) - - # reassemble the string by taking a string and str(ref) pairwise - ret = '' - for i in range(0, len(self._refs)): - ret += self._strings[i] + str(resolver(self._refs[i])) - if len(self._strings) > len(self._refs): - # and finally append a trailing string, if any - ret += self._strings[-1] - return ret - - def render(self, context): - resolver = lambda s: self._resolve(s, context) - return 
self._assemble(resolver) - - def __repr__(self): - do_not_resolve = lambda s: s.join(PARAMETER_INTERPOLATION_SENTINELS) - return 'RefValue(%r, %r)' % (self._assemble(do_not_resolve), - self._delim) diff --git a/reclass/utils/tests/__init__.py b/reclass/utils/tests/__init__.py index e69de29b..9aaaf25a 100644 --- a/reclass/utils/tests/__init__.py +++ b/reclass/utils/tests/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals diff --git a/reclass/utils/tests/test_dictpath.py b/reclass/utils/tests/test_dictpath.py index 972dc916..6fbb6b7a 100644 --- a/reclass/utils/tests/test_dictpath.py +++ b/reclass/utils/tests/test_dictpath.py @@ -6,6 +6,11 @@ # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + from reclass.utils.dictpath import DictPath import unittest @@ -64,7 +69,7 @@ def test_repr(self): delim = '%' s = 'a:b\:b:c' p = DictPath(delim, s) - self.assertEqual('%r' % p, 'DictPath(%r, %r)' % (delim, s)) + self.assertEqual('%r' % p, "DictPath(%r, %r)" % (delim, str(s))) def test_str(self): s = 'a:b\:b:c' diff --git a/reclass/values/__init__.py b/reclass/values/__init__.py new file mode 100644 index 00000000..ec0f8822 --- /dev/null +++ b/reclass/values/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals diff --git a/reclass/values/compitem.py b/reclass/values/compitem.py new file mode 100644 index 00000000..c262f277 --- /dev/null +++ b/reclass/values/compitem.py @@ -0,0 +1,30 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass +# + +from reclass.settings import 
Settings +from reclass.values import item + + +class CompItem(item.ItemWithReferences): + + type = item.ItemTypes.COMPOSITE + + def merge_over(self, other): + if (other.type == item.ItemTypes.SCALAR or + other.type == item.ItemTypes.COMPOSITE): + return self + raise RuntimeError('Failed to merge %s over %s' % (self, other)) + + def render(self, context, inventory): + # Preserve type if only one item + if len(self.contents) == 1: + return self.contents[0].render(context, inventory) + # Multiple items + strings = [str(i.render(context, inventory)) for i in self.contents] + return "".join(strings) + + def __str__(self): + return ''.join([str(i) for i in self.contents]) diff --git a/reclass/values/dictitem.py b/reclass/values/dictitem.py new file mode 100644 index 00000000..0648a39e --- /dev/null +++ b/reclass/values/dictitem.py @@ -0,0 +1,12 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass +# + +from reclass.values import item + + +class DictItem(item.ContainerItem): + + type = item.ItemTypes.DICTIONARY diff --git a/reclass/values/invitem.py b/reclass/values/invitem.py new file mode 100644 index 00000000..adb1cb6c --- /dev/null +++ b/reclass/values/invitem.py @@ -0,0 +1,243 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass +# +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import copy +import itertools as it +import operator +import pyparsing as pp + +from six import iteritems +from six import string_types + +from reclass.values import item +from reclass.values import parser_funcs +from reclass.settings import Settings +from reclass.utils.dictpath import DictPath +from reclass.errors import ExpressionError, ParseError, ResolveError + + +# TODO: generalize expression handling. 
+class BaseTestExpression(object): + + known_operators = {} + def __init__(self, delimiter): + self._delimiter = delimiter + self.refs = [] + self.inv_refs = [] + + +class EqualityTest(BaseTestExpression): + + known_operators = { parser_funcs.EQUAL: operator.eq, + parser_funcs.NOT_EQUAL: operator.ne} + + def __init__(self, expression, delimiter): + # expression is a list of at least three tuples, of which first element + # is a string tag, second is subelement value; other tuples apparently + # are not used. + # expression[0][1] effectively contains export path and apparently must + # be treated as such, also left hand operand in comparison + # expression[1][1] apparently holds comparison operator == or != + # expression[2][1] is the right-hand operand + super(EqualityTest, self).__init__(delimiter) + # TODO: this double somersault must be cleaned + _ = self._get_vars(expression[2][1], *self._get_vars(expression[0][1])) + self._export_path, self._parameter_path, self._parameter_value = _ + try: + self._export_path.drop_first() + except AttributeError: + raise ExpressionError('No export') + try: + self._compare = self.known_operators[expression[1][1]] + except KeyError as e: + msg = 'Unknown test {0}'.format(expression[1][1]) + raise ExpressionError(msg, tbFlag=False) + self.inv_refs = [self._export_path] + if self._parameter_path is not None: + self._parameter_path.drop_first() + self.refs = [str(self._parameter_path)] + + def value(self, context, items): + if self._parameter_path is not None: + self._parameter_value = self._resolve(self._parameter_path, + context) + if self._parameter_value is None: + raise ExpressionError('Failed to render %s' % str(self), + tbFlag=False) + if self._export_path.exists_in(items): + export_value = self._resolve(self._export_path, items) + return self._compare(export_value, self._parameter_value) + return False + + def _resolve(self, path, dictionary): + try: + return path.get_value(dictionary) + except KeyError as e: + raise 
ResolveError(str(path)) + + def _get_vars(self, var, export=None, parameter=None, value=None): + if isinstance(var, string_types): + path = DictPath(self._delimiter, var) + if path.path[0].lower() == 'exports': + export = path + elif path.path[0].lower() == 'self': + parameter = path + elif path.path[0].lower() == 'true': + value = True + elif path.path[0].lower() == 'false': + value = False + else: + value = var + else: + value = var + return export, parameter, value + + +class LogicTest(BaseTestExpression): + + known_operators = { parser_funcs.AND: operator.and_, + parser_funcs.OR: operator.or_} + + def __init__(self, expr, delimiter): + super(LogicTest, self).__init__(delimiter) + subtests = list(it.compress(expr, it.cycle([1, 1, 1, 0]))) + self._els = [EqualityTest(subtests[j:j+3], self._delimiter) + for j in range(0, len(subtests), 3)] + for x in self._els: + self.refs.extend(x.refs) + self.inv_refs.extend(x.inv_refs) + try: + self._ops = [self.known_operators[x[1]] for x in expr[3::4]] + except KeyError as e: + msg = 'Unknown operator {0} {1}'.format(e.messsage, self._els) + raise ExpressionError(msg, tbFlag=False) + + def value(self, context, items): + if len(self._els) == 0: # NOTE: possible logic error + return True + result = self._els[0].value(context, items) + for op, next_el in zip(self._ops, self._els[1:]): + result = op(result, next_el.value(context, items)) + return result + + +class InvItem(item.Item): + + type = item.ItemTypes.INV_QUERY + + def __init__(self, newitem, settings): + super(InvItem, self).__init__(newitem.render(None, None), settings) + self.needs_all_envs = False + self.has_inv_query = True + self.ignore_failed_render = ( + self._settings.inventory_ignore_failed_render) + self._parse_expression(self.contents) + + def _parse_expression(self, expr): + parser = parser_funcs.get_expression_parser() + try: + tokens = parser.parseString(expr).asList() + except pp.ParseException as e: + raise ParseError(e.msg, e.line, e.col, e.lineno) + + 
if len(tokens) == 2: # options are set + passed_opts = [x[1] for x in tokens.pop(0)] + self.ignore_failed_render = parser_funcs.IGNORE_ERRORS in passed_opts + self.needs_all_envs = parser_funcs.ALL_ENVS in passed_opts + elif len(tokens) > 2: + raise ExpressionError('Failed to parse %s' % str(tokens), + tbFlag=False) + self._expr_type = tokens[0][0] + self._expr = list(tokens[0][1]) + + if self._expr_type == parser_funcs.VALUE: + self._value_path = DictPath(self._settings.delimiter, + self._expr[0][1]).drop_first() + self._question = LogicTest([], self._settings.delimiter) + self.refs = [] + self.inv_refs = [self._value_path] + elif self._expr_type == parser_funcs.TEST: + self._value_path = DictPath(self._settings.delimiter, + self._expr[0][1]).drop_first() + self._question = LogicTest(self._expr[2:], self._settings.delimiter) + self.refs = self._question.refs + self.inv_refs = self._question.inv_refs + self.inv_refs.append(self._value_path) + elif self._expr_type == parser_funcs.LIST_TEST: + self._value_path = None + self._question = LogicTest(self._expr[1:], self._settings.delimiter) + self.refs = self._question.refs + self.inv_refs = self._question.inv_refs + else: + msg = 'Unknown expression type: %s' + raise ExpressionError(msg % self._expr_type, tbFlag=False) + + @property + def has_references(self): + return len(self._question.refs) > 0 + + def get_references(self): + return self._question.refs + + def assembleRefs(self, context): + return + + def get_inv_references(self): + return self.inv_refs + + def _resolve(self, path, dictionary): + try: + return path.get_value(dictionary) + except KeyError as e: + raise ResolveError(str(path)) + + def _value_expression(self, inventory): + results = {} + for (node, items) in iteritems(inventory): + if self._value_path.exists_in(items): + results[node] = copy.deepcopy(self._resolve(self._value_path, + items)) + return results + + def _test_expression(self, context, inventory): + if self._value_path is None: + msg = 
'Failed to render %s' + raise ExpressionError(msg % str(self), tbFlag=False) + + results = {} + for node, items in iteritems(inventory): + if (self._question.value(context, items) and + self._value_path.exists_in(items)): + results[node] = copy.deepcopy( + self._resolve(self._value_path, items)) + return results + + def _list_test_expression(self, context, inventory): + results = [] + for (node, items) in iteritems(inventory): + if self._question.value(context, items): + results.append(node) + return results + + def render(self, context, inventory): + if self._expr_type == parser_funcs.VALUE: + return self._value_expression(inventory) + elif self._expr_type == parser_funcs.TEST: + return self._test_expression(context, inventory) + elif self._expr_type == parser_funcs.LIST_TEST: + return self._list_test_expression(context, inventory) + raise ExpressionError('Failed to render %s' % str(self), tbFlag=False) + + def __str__(self): + return ' '.join(str(j) for i,j in self._expr) + + def __repr__(self): + # had to leave it here for now as the behaviour differs from basic + return 'InvItem(%r)' % self._expr diff --git a/reclass/values/item.py b/reclass/values/item.py new file mode 100644 index 00000000..45aeb77b --- /dev/null +++ b/reclass/values/item.py @@ -0,0 +1,92 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass +# +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from enum import Enum + +from reclass.utils.dictpath import DictPath + +ItemTypes = Enum('ItemTypes', + ['COMPOSITE', 'DICTIONARY', 'INV_QUERY', 'LIST', + 'REFERENCE', 'SCALAR']) + + +class Item(object): + + def __init__(self, item, settings): + self._settings = settings + self.contents = item + self.has_inv_query = False + + def allRefs(self): + return True + + @property + def has_references(self): + return False + + def is_container(self): + return False + + @property + def 
is_complex(self): + return (self.has_references | self.has_inv_query) + + def merge_over(self, item): + msg = "Item class {0} does not implement merge_over()" + raise NotImplementedError(msg.format(self.__class__.__name__)) + + def render(self, context, exports): + msg = "Item class {0} does not implement render()" + raise NotImplementedError(msg.format(self.__class__.__name__)) + + def type_str(self): + return self.type.name.lower() + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, self.contents) + + +class ItemWithReferences(Item): + + def __init__(self, items, settings): + super(ItemWithReferences, self).__init__(items, settings) + try: + iter(self.contents) + except TypeError: + self.contents = [self.contents] + self.assembleRefs() + + @property + def has_references(self): + return len(self._refs) > 0 + + def get_references(self): + return self._refs + + # NOTE: possibility of confusion. Looks like 'assemble' should be either + # 'gather' or 'extract'. + def assembleRefs(self, context={}): + self._refs = [] + self.allRefs = True + for item in self.contents: + if item.has_references: + item.assembleRefs(context) + self._refs.extend(item.get_references()) + if item.allRefs is False: + self.allRefs = False + + +class ContainerItem(Item): + + def is_container(self): + return True + + def render(self, context, inventory): + return self.contents diff --git a/reclass/values/listitem.py b/reclass/values/listitem.py new file mode 100644 index 00000000..24bece1d --- /dev/null +++ b/reclass/values/listitem.py @@ -0,0 +1,18 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass +# + +from reclass.values import item + + +class ListItem(item.ContainerItem): + + type = item.ItemTypes.LIST + + def merge_over(self, other): + if other.type == item.ItemTypes.LIST: + other.contents.extend(self.contents) + return other + raise RuntimeError('Failed to merge %s over %s' % (self, other)) diff --git a/reclass/values/parser.py b/reclass/values/parser.py 
new file mode 100644 index 00000000..3f7ac1f7 --- /dev/null +++ b/reclass/values/parser.py @@ -0,0 +1,89 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass +# +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import pyparsing as pp + +from .compitem import CompItem +from .invitem import InvItem +from .refitem import RefItem +from .scaitem import ScaItem + +from reclass.errors import ParseError +from reclass.values.parser_funcs import tags +import reclass.values.parser_funcs as parsers + +import collections +import six + + +class Parser(object): + + def __init__(self): + self._ref_parser = None + self._simple_parser = None + self._old_settings = None + + @property + def ref_parser(self): + if self._ref_parser is None or self._settings != self._old_settings: + self._ref_parser = parsers.get_ref_parser(self._settings) + self._old_settings = self._settings + return self._ref_parser + + @property + def simple_ref_parser(self): + if self._simple_parser is None or self._settings != self._old_settings: + self._simple_parser = parsers.get_simple_ref_parser(self._settings) + self._old_settings = self._settings + return self._simple_parser + + def parse(self, value, settings): + def full_parse(): + try: + return self.ref_parser.parseString(value) + except pp.ParseException as e: + raise ParseError(e.msg, e.line, e.col, e.lineno) + + self._settings = settings + sentinel_count = (value.count(settings.reference_sentinels[0]) + + value.count(settings.export_sentinels[0])) + if sentinel_count == 0: + # speed up: only use pyparsing if there are sentinels in the value + return ScaItem(value, self._settings) + elif sentinel_count == 1: # speed up: try a simple reference + try: + tokens = self.simple_ref_parser.parseString(value) + except pp.ParseException: + tokens = full_parse() # fall back on the full parser + else: + tokens = full_parse() # use the full parser + 
+ tokens = parsers.listify(tokens) + items = self._create_items(tokens) + if len(items) == 1: + return items[0] + return CompItem(items, self._settings) + + _item_builders = {tags.STR: (lambda s, v: ScaItem(v, s._settings)), + tags.REF: (lambda s, v: s._create_ref(v)), + tags.INV: (lambda s, v: s._create_inv(v)) } + + def _create_items(self, tokens): + return [self._item_builders[t](self, v) for t, v in tokens ] + + def _create_ref(self, tokens): + items = [ self._item_builders[t](self, v) for t, v in tokens ] + return RefItem(items, self._settings) + + def _create_inv(self, tokens): + items = [ScaItem(v, self._settings) for t, v in tokens] + if len(items) == 1: + return InvItem(items[0], self._settings) + return InvItem(CompItem(items), self._settings) diff --git a/reclass/values/parser_funcs.py b/reclass/values/parser_funcs.py new file mode 100644 index 00000000..db34cebc --- /dev/null +++ b/reclass/values/parser_funcs.py @@ -0,0 +1,178 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass +# +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import collections +import enum +import functools +import pyparsing as pp +import six + +tags = enum.Enum('Tags', ['STR', 'REF', 'INV']) + +_OBJ = 'OBJ' +_LOGICAL = 'LOGICAL' +_OPTION = 'OPTION' +_IF = 'IF' + +TEST = 'TEST' +LIST_TEST = 'LIST_TEST' + +VALUE = 'VALUE' +AND = 'AND' +OR = 'OR' + +EQUAL = '==' +NOT_EQUAL = '!=' + +IGNORE_ERRORS = '+IgnoreErrors' +ALL_ENVS = '+AllEnvs' + + +s_end = pp.StringEnd() + +def _tag_with(tag, transform=lambda x:x): + def inner(tag, string, location, tokens): + token = transform(tokens[0]) + tokens[0] = (tag, token) + return functools.partial(inner, tag) + +def _asList(x): + if isinstance(x, pp.ParseResults): + return x.asList() + return x + +def listify(w, modifier=_asList): + if (isinstance(w, collections.Iterable) and + not isinstance(w, six.string_types)): + cls = 
type(w) + if cls == pp.ParseResults: + cls = list + return cls([listify(x) for x in w]) + return modifier(w) + +def get_expression_parser(): + sign = pp.Optional(pp.Literal('-')) + number = pp.Word(pp.nums) + dpoint = pp.Literal('.') + ignore_errors = pp.CaselessLiteral(IGNORE_ERRORS) + all_envs = pp.CaselessLiteral(ALL_ENVS) + eq, neq = pp.Literal(EQUAL), pp.Literal(NOT_EQUAL) + eand, eor = pp.CaselessLiteral(AND), pp.CaselessLiteral(OR) + + option = (ignore_errors | all_envs).setParseAction(_tag_with(_OPTION)) + options = pp.Group(pp.ZeroOrMore(option)) + operator_test = (eq | neq).setParseAction(_tag_with(TEST)) + operator_logical = (eand | eor).setParseAction(_tag_with(_LOGICAL)) + begin_if = pp.CaselessLiteral(_IF).setParseAction(_tag_with(_IF)) + obj = pp.Word(pp.printables).setParseAction(_tag_with(_OBJ)) + + integer = pp.Combine(sign + number + pp.WordEnd()).setParseAction( + _tag_with(_OBJ, int)) + real = pp.Combine(sign + + ((number + dpoint + number) | + (dpoint + number) | + (number + dpoint)) + ).setParseAction(_tag_with(_OBJ, float)) + expritem = integer | real | obj + single_test = expritem + operator_test + expritem + additional_test = operator_logical + single_test + + expr_var = pp.Group(obj + s_end).setParseAction(_tag_with(VALUE)) + expr_test = pp.Group(obj + begin_if + single_test + + pp.ZeroOrMore(additional_test) + + s_end).setParseAction(_tag_with(TEST)) + expr_list_test = pp.Group(begin_if + single_test + + pp.ZeroOrMore(additional_test) + + s_end).setParseAction(_tag_with(LIST_TEST)) + expr = expr_test | expr_var | expr_list_test + line = options + expr + s_end + return line + +def get_ref_parser(settings): + _ESCAPE = settings.escape_character + _DOUBLE_ESCAPE = _ESCAPE + _ESCAPE + + _REF_OPEN, _REF_CLOSE = settings.reference_sentinels + _REF_CLOSE_FIRST = _REF_CLOSE[0] + _REF_ESCAPE_OPEN = _ESCAPE + _REF_OPEN + _REF_ESCAPE_CLOSE = _ESCAPE + _REF_CLOSE + _REF_DOUBLE_ESCAPE_OPEN = _DOUBLE_ESCAPE + _REF_OPEN + _REF_DOUBLE_ESCAPE_CLOSE = 
_DOUBLE_ESCAPE + _REF_CLOSE + _REF_EXCLUDES = _ESCAPE + _REF_OPEN + _REF_CLOSE + + _INV_OPEN, _INV_CLOSE = settings.export_sentinels + _INV_CLOSE_FIRST = _INV_CLOSE[0] + _INV_ESCAPE_OPEN = _ESCAPE + _INV_OPEN + _INV_ESCAPE_CLOSE = _ESCAPE + _INV_CLOSE + _INV_DOUBLE_ESCAPE_OPEN = _DOUBLE_ESCAPE + _INV_OPEN + _INV_DOUBLE_ESCAPE_CLOSE = _DOUBLE_ESCAPE + _INV_CLOSE + _INV_EXCLUDES = _ESCAPE + _INV_OPEN + _INV_CLOSE + + _EXCLUDES = _ESCAPE + _REF_OPEN + _REF_CLOSE + _INV_OPEN + _INV_CLOSE + + double_escape = pp.Combine(pp.Literal(_DOUBLE_ESCAPE) + + pp.MatchFirst([pp.FollowedBy(_REF_OPEN), + pp.FollowedBy(_REF_CLOSE), + pp.FollowedBy(_INV_OPEN), + pp.FollowedBy(_INV_CLOSE)])).setParseAction( + pp.replaceWith(_ESCAPE)) + + ref_open = pp.Literal(_REF_OPEN).suppress() + ref_close = pp.Literal(_REF_CLOSE).suppress() + ref_not_open = ~pp.Literal(_REF_OPEN) + ~pp.Literal(_REF_ESCAPE_OPEN) + ~pp.Literal(_REF_DOUBLE_ESCAPE_OPEN) + ref_not_close = ~pp.Literal(_REF_CLOSE) + ~pp.Literal(_REF_ESCAPE_CLOSE) + ~pp.Literal(_REF_DOUBLE_ESCAPE_CLOSE) + ref_escape_open = pp.Literal(_REF_ESCAPE_OPEN).setParseAction(pp.replaceWith(_REF_OPEN)) + ref_escape_close = pp.Literal(_REF_ESCAPE_CLOSE).setParseAction(pp.replaceWith(_REF_CLOSE)) + ref_text = pp.CharsNotIn(_REF_EXCLUDES) | pp.CharsNotIn(_REF_CLOSE_FIRST, exact=1) + ref_content = pp.Combine(pp.OneOrMore(ref_not_open + ref_not_close + ref_text)) + ref_string = pp.MatchFirst([double_escape, ref_escape_open, ref_escape_close, ref_content]).setParseAction(_tag_with(tags.STR)) + ref_item = pp.Forward() + ref_items = pp.OneOrMore(ref_item) + reference = (ref_open + pp.Group(ref_items) + ref_close).setParseAction(_tag_with(tags.REF)) + ref_item << (reference | ref_string) + + inv_open = pp.Literal(_INV_OPEN).suppress() + inv_close = pp.Literal(_INV_CLOSE).suppress() + inv_not_open = ~pp.Literal(_INV_OPEN) + ~pp.Literal(_INV_ESCAPE_OPEN) + ~pp.Literal(_INV_DOUBLE_ESCAPE_OPEN) + inv_not_close = ~pp.Literal(_INV_CLOSE) + 
~pp.Literal(_INV_ESCAPE_CLOSE) + ~pp.Literal(_INV_DOUBLE_ESCAPE_CLOSE) + inv_escape_open = pp.Literal(_INV_ESCAPE_OPEN).setParseAction(pp.replaceWith(_INV_OPEN)) + inv_escape_close = pp.Literal(_INV_ESCAPE_CLOSE).setParseAction(pp.replaceWith(_INV_CLOSE)) + inv_text = pp.CharsNotIn(_INV_CLOSE_FIRST) + inv_content = pp.Combine(pp.OneOrMore(inv_not_close + inv_text)) + inv_string = pp.MatchFirst( + [double_escape, inv_escape_open, inv_escape_close, inv_content] + ).setParseAction(_tag_with(tags.STR)) + inv_items = pp.OneOrMore(inv_string) + export = (inv_open + pp.Group(inv_items) + inv_close).setParseAction(_tag_with(tags.INV)) + + text = pp.CharsNotIn(_EXCLUDES) | pp.CharsNotIn('', exact=1) + content = pp.Combine(pp.OneOrMore(ref_not_open + inv_not_open + text)) + string = pp.MatchFirst( + [double_escape, ref_escape_open, inv_escape_open, content] + ).setParseAction(_tag_with(tags.STR)) + + item = reference | export | string + line = pp.OneOrMore(item) + s_end + return line.leaveWhitespace() + + +def get_simple_ref_parser(settings): + + ESCAPE = settings.escape_character + REF_OPEN, REF_CLOSE = settings.reference_sentinels + INV_OPEN, INV_CLOSE = settings.export_sentinels + EXCLUDES = ESCAPE + REF_OPEN + REF_CLOSE + INV_OPEN + INV_CLOSE + + string = pp.CharsNotIn(EXCLUDES).setParseAction(_tag_with(tags.STR)) + ref_open = pp.Literal(REF_OPEN).suppress() + ref_close = pp.Literal(REF_CLOSE).suppress() + reference = (ref_open + pp.Group(string) + ref_close).setParseAction(_tag_with(tags.REF)) + line = pp.StringStart() + pp.Optional(string) + reference + pp.Optional(string) + s_end + return line.leaveWhitespace() diff --git a/reclass/values/refitem.py b/reclass/values/refitem.py new file mode 100644 index 00000000..64bf4503 --- /dev/null +++ b/reclass/values/refitem.py @@ -0,0 +1,43 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass +# + +from reclass.values import item +from reclass.utils.dictpath import DictPath +from reclass.errors import ResolveError 
+ + +class RefItem(item.ItemWithReferences): + + type = item.ItemTypes.REFERENCE + + def assembleRefs(self, context={}): + super(RefItem, self).assembleRefs(context) + try: + self._refs.append(self._flatten_contents(context)) + except ResolveError as e: + self.allRefs = False + + def _flatten_contents(self, context, inventory=None): + result = [str(i.render(context, inventory)) for i in self.contents] + return "".join(result) + + def _resolve(self, ref, context): + path = DictPath(self._settings.delimiter, ref) + try: + return path.get_value(context) + except (KeyError, TypeError) as e: + raise ResolveError(ref) + + def render(self, context, inventory): + #strings = [str(i.render(context, inventory)) for i in self.contents] + #return self._resolve("".join(strings), context) + return self._resolve(self._flatten_contents(context, inventory), + context) + + def __str__(self): + strings = [str(i) for i in self.contents] + rs = self._settings.reference_sentinels + return '{0}{1}{2}'.format(rs[0], ''.join(strings), rs[1]) diff --git a/reclass/values/scaitem.py b/reclass/values/scaitem.py new file mode 100644 index 00000000..1bcbd2c9 --- /dev/null +++ b/reclass/values/scaitem.py @@ -0,0 +1,31 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass +# +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from reclass.settings import Settings +from reclass.values import item + + +class ScaItem(item.Item): + + type = item.ItemTypes.SCALAR + + def __init__(self, value, settings): + super(ScaItem, self).__init__(value, settings) + + def merge_over(self, other): + if other.type in [item.ItemTypes.SCALAR, item.ItemTypes.COMPOSITE]: + return self + raise RuntimeError('Failed to merge %s over %s' % (self, other)) + + def render(self, context, inventory): + return self.contents + + def __str__(self): + return str(self.contents) diff --git 
a/reclass/values/tests/__init__.py b/reclass/values/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/reclass/values/tests/test_compitem.py b/reclass/values/tests/test_compitem.py new file mode 100644 index 00000000..c3ee6909 --- /dev/null +++ b/reclass/values/tests/test_compitem.py @@ -0,0 +1,126 @@ +from reclass.settings import Settings +from reclass.values.value import Value +from reclass.values.compitem import CompItem +from reclass.values.scaitem import ScaItem +from reclass.values.valuelist import ValueList +from reclass.values.listitem import ListItem +from reclass.values.dictitem import DictItem +import unittest + +SETTINGS = Settings() + +class TestCompItem(unittest.TestCase): + + def test_assembleRefs_no_items(self): + composite = CompItem([], SETTINGS) + + self.assertFalse(composite.has_references) + + def test_assembleRefs_one_item_without_refs(self): + val1 = Value('foo', SETTINGS, '') + + composite = CompItem([val1], SETTINGS) + + self.assertFalse(composite.has_references) + + def test_assembleRefs_one_item_with_one_ref(self): + val1 = Value('${foo}', SETTINGS, '') + expected_refs = ['foo'] + + composite = CompItem([val1], SETTINGS) + + self.assertTrue(composite.has_references) + self.assertEquals(composite.get_references(), expected_refs) + + def test_assembleRefs_one_item_with_two_refs(self): + val1 = Value('${foo}${bar}', SETTINGS, '') + expected_refs = ['foo', 'bar'] + + composite = CompItem([val1], SETTINGS) + + self.assertTrue(composite.has_references) + self.assertEquals(composite.get_references(), expected_refs) + + def test_assembleRefs_two_items_one_with_one_ref_one_without(self): + val1 = Value('${foo}bar', SETTINGS, '') + val2 = Value('baz', SETTINGS, '') + expected_refs = ['foo'] + + composite = CompItem([val1, val2], SETTINGS) + + self.assertTrue(composite.has_references) + self.assertEquals(composite.get_references(), expected_refs) + + def test_assembleRefs_two_items_both_with_one_ref(self): + val1 = 
Value('${foo}', SETTINGS, '') + val2 = Value('${bar}', SETTINGS, '') + expected_refs = ['foo', 'bar'] + + composite = CompItem([val1, val2], SETTINGS) + + self.assertTrue(composite.has_references) + self.assertEquals(composite.get_references(), expected_refs) + + def test_assembleRefs_two_items_with_two_refs(self): + val1 = Value('${foo}${baz}', SETTINGS, '') + val2 = Value('${bar}${meep}', SETTINGS, '') + expected_refs = ['foo', 'baz', 'bar', 'meep'] + + composite = CompItem([val1, val2], SETTINGS) + + self.assertTrue(composite.has_references) + self.assertEquals(composite.get_references(), expected_refs) + + def test_string_representation(self): + composite = CompItem(Value(1, SETTINGS, ''), SETTINGS) + expected = '1' + + result = str(composite) + + self.assertEquals(result, expected) + + def test_render_single_item(self): + val1 = Value('${foo}', SETTINGS, '') + + composite = CompItem([val1], SETTINGS) + + self.assertEquals(1, composite.render({'foo': 1}, None)) + + + def test_render_multiple_items(self): + val1 = Value('${foo}', SETTINGS, '') + val2 = Value('${bar}', SETTINGS, '') + + composite = CompItem([val1, val2], SETTINGS) + + self.assertEquals('12', composite.render({'foo': 1, 'bar': 2}, None)) + + def test_merge_over_merge_scalar(self): + val1 = Value(None, SETTINGS, '') + scalar = ScaItem(1, SETTINGS) + composite = CompItem([val1], SETTINGS) + + result = composite.merge_over(scalar) + + self.assertEquals(result, composite) + + def test_merge_over_merge_composite(self): + val1 = Value(None, SETTINGS, '') + val2 = Value(None, SETTINGS, '') + composite1 = CompItem([val1], SETTINGS) + composite2 = CompItem([val2], SETTINGS) + + result = composite2.merge_over(composite1) + + self.assertEquals(result, composite2) + + def test_merge_other_types_not_allowed(self): + other = type('Other', (object,), {'type': 34}) + val1 = Value(None, SETTINGS, '') + composite = CompItem([val1], SETTINGS) + + self.assertRaises(RuntimeError, composite.merge_over, other) + + +if 
__name__ == '__main__': + unittest.main() diff --git a/reclass/values/tests/test_item.py b/reclass/values/tests/test_item.py new file mode 100644 index 00000000..4b91f6e6 --- /dev/null +++ b/reclass/values/tests/test_item.py @@ -0,0 +1,48 @@ +from reclass.settings import Settings +from reclass.values.value import Value +from reclass.values.compitem import CompItem +from reclass.values.scaitem import ScaItem +from reclass.values.valuelist import ValueList +from reclass.values.listitem import ListItem +from reclass.values.dictitem import DictItem +from reclass.values.item import ContainerItem +from reclass.values.item import ItemWithReferences +import unittest +from mock import MagicMock + +SETTINGS = Settings() + + +class TestItemWithReferences(unittest.TestCase): + + def test_assembleRef_allrefs(self): + phonyitem = MagicMock() + phonyitem.has_references = True + phonyitem.get_references = lambda *x: [1] + + iwr = ItemWithReferences([phonyitem], {}) + + self.assertEquals(iwr.get_references(), [1]) + self.assertTrue(iwr.allRefs) + + def test_assembleRef_partial(self): + phonyitem = MagicMock() + phonyitem.has_references = True + phonyitem.allRefs = False + phonyitem.get_references = lambda *x: [1] + + iwr = ItemWithReferences([phonyitem], {}) + + self.assertEquals(iwr.get_references(), [1]) + self.assertFalse(iwr.allRefs) + + +class TestContainerItem(unittest.TestCase): + + def test_render(self): + container = ContainerItem('foo', SETTINGS) + + self.assertEquals(container.render(None, None), 'foo') + +if __name__ == '__main__': + unittest.main() diff --git a/reclass/values/tests/test_listitem.py b/reclass/values/tests/test_listitem.py new file mode 100644 index 00000000..618b7797 --- /dev/null +++ b/reclass/values/tests/test_listitem.py @@ -0,0 +1,31 @@ +from reclass.settings import Settings +from reclass.values.value import Value +from reclass.values.compitem import CompItem +from reclass.values.scaitem import ScaItem +from reclass.values.valuelist import ValueList 
+from reclass.values.listitem import ListItem +from reclass.values.dictitem import DictItem +import unittest + +SETTINGS = Settings() + +class TestListItem(unittest.TestCase): + + def test_merge_over_merge_list(self): + listitem1 = ListItem([1], SETTINGS) + listitem2 = ListItem([2], SETTINGS) + expected = ListItem([1, 2], SETTINGS) + + result = listitem2.merge_over(listitem1) + + self.assertEquals(result.contents, expected.contents) + + def test_merge_other_types_not_allowed(self): + other = type('Other', (object,), {'type': 34}) + val1 = Value(None, SETTINGS, '') + listitem = ListItem(val1, SETTINGS) + + self.assertRaises(RuntimeError, listitem.merge_over, other) + +if __name__ == '__main__': + unittest.main() diff --git a/reclass/values/tests/test_parser_functions.py b/reclass/values/tests/test_parser_functions.py new file mode 100644 index 00000000..a660c76c --- /dev/null +++ b/reclass/values/tests/test_parser_functions.py @@ -0,0 +1,116 @@ +from reclass import settings +from reclass.values import parser_funcs as pf +import unittest +import ddt + + +SETTINGS = settings.Settings() + +# Test cases for parsers. Each test case is a two-tuple of input string and +# expected output. NOTE: default values for sentinels are used here to avoid +# cluttering up the code. +test_pairs_simple = ( + # Basic test cases. + ('${foo}', [(pf.tags.REF, [(pf.tags.STR, 'foo')])]), + # Basic combinations. + ('bar${foo}', [(pf.tags.STR, 'bar'), + (pf.tags.REF, [(pf.tags.STR, 'foo')])]), + ('bar${foo}baz', [(pf.tags.STR, 'bar'), + (pf.tags.REF, [(pf.tags.STR, 'foo')]), + (pf.tags.STR, 'baz')]), + ('${foo}baz', [(pf.tags.REF, [(pf.tags.STR, 'foo')]), + (pf.tags.STR, 'baz')]), + # Whitespace preservation cases. 
+ ('bar ${foo}', [(pf.tags.STR, 'bar '), + (pf.tags.REF, [(pf.tags.STR, 'foo')])]), + ('bar ${foo baz}', [(pf.tags.STR, 'bar '), + (pf.tags.REF, [(pf.tags.STR, 'foo baz')])]), + ('bar${foo} baz', [(pf.tags.STR, 'bar'), + (pf.tags.REF, [(pf.tags.STR, 'foo')]), + (pf.tags.STR, ' baz')]), + (' bar${foo} baz ', [(pf.tags.STR, ' bar'), + (pf.tags.REF, [(pf.tags.STR, 'foo')]), + (pf.tags.STR, ' baz ')]), +) + +# Simple parser test cases are also included in this test grouop. +test_pairs_full = ( + # Single elements sanity. + ('foo', [(pf.tags.STR, 'foo')]), + ('$foo', [(pf.tags.STR, '$foo')]), + ('{foo}', [(pf.tags.STR, '{foo}')]), + ('[foo]', [(pf.tags.STR, '[foo]')]), + ('$(foo)', [(pf.tags.STR, '$(foo)')]), + ('$[foo]', [(pf.tags.INV, [(pf.tags.STR, 'foo')])]), + + # Escape sequences. + # NOTE: these sequences apparently are not working as expected. + #(r'\\\\${foo}', [(pf.tags.REF, [(pf.tags.STR, 'foo')])]), + #(r'\\${foo}', [(pf.tags.REF, [(pf.tags.STR, 'foo')])]), + #(r'\${foo}', [(pf.tags.REF, [(pf.tags.STR, 'foo')])]), + + # Basic combinations. + ('bar$[foo]', [(pf.tags.STR, 'bar'), + (pf.tags.INV, [(pf.tags.STR, 'foo')])]), + ('bar$[foo]baz', [(pf.tags.STR, 'bar'), + (pf.tags.INV, [(pf.tags.STR, 'foo')]), + (pf.tags.STR, 'baz')]), + ('$[foo]baz', [(pf.tags.INV, [(pf.tags.STR, 'foo')]), + (pf.tags.STR, 'baz')]), + + # Whitespace preservation in various positions. + (' foo ', [(pf.tags.STR, ' foo ')]), + ('foo bar', [(pf.tags.STR, 'foo bar')]), + ('bar $[foo baz]', [(pf.tags.STR, 'bar '), + (pf.tags.INV, [(pf.tags.STR, 'foo baz')])]), + ('bar$[foo] baz ', [(pf.tags.STR, 'bar'), + (pf.tags.INV, [(pf.tags.STR, 'foo')]), + (pf.tags.STR, ' baz ')]), + + # Nested references and inventory items. 
+ ('${foo}${bar}',[(pf.tags.REF, [(pf.tags.STR, 'foo')]), + (pf.tags.REF, [(pf.tags.STR, 'bar')])]), + ('${foo${bar}}',[(pf.tags.REF, [(pf.tags.STR, 'foo'), + (pf.tags.REF, [(pf.tags.STR, 'bar')])])]), + ('$[foo]$[bar]',[(pf.tags.INV, [(pf.tags.STR, 'foo')]), + (pf.tags.INV, [(pf.tags.STR, 'bar')])]), + # NOTE: the cases below do not work as expected, which is probably a bug. + # Any nesting in INV creates a string. + #('${$[foo]}', [(pf.tags.REF, [(pf.tags.INV, [(pf.tags.STR, 'foo')])])]), + #('$[${foo}]', [(pf.tags.INV, [(pf.tags.REF, [(pf.tags.STR, 'foo')])])]), + #('$[foo$[bar]]',[(pf.tags.INV, [(pf.tags.STR, 'foo'), + # (pf.tags.INV, [(pf.tags.STR, 'bar')])])]), + +) + test_pairs_simple + + +@ddt.ddt +class TestRefParser(unittest.TestCase): + + @ddt.data(*test_pairs_full) + def test_standard_reference_parser(self, data): + instring, expected = data + parser = pf.get_ref_parser(SETTINGS) + + result = pf.listify(parser.parseString(instring).asList()) + + self.assertEquals(expected, result) + + +@ddt.ddt +class TestSimpleRefParser(unittest.TestCase): + + @ddt.data(*test_pairs_simple) + def test_standard_reference_parser(self, data): + # NOTE: simple reference parser can parse references only. It fails + # on inventory items. 
+ instring, expected = data + parser = pf.get_simple_ref_parser(SETTINGS) + + result = pf.listify(parser.parseString(instring).asList()) + + self.assertEquals(expected, result) + + +if __name__ == '__main__': + unittest.main() diff --git a/reclass/values/tests/test_refitem.py b/reclass/values/tests/test_refitem.py new file mode 100644 index 00000000..65814782 --- /dev/null +++ b/reclass/values/tests/test_refitem.py @@ -0,0 +1,57 @@ +from reclass import errors + +from reclass.settings import Settings +from reclass.values.value import Value +from reclass.values.compitem import CompItem +from reclass.values.scaitem import ScaItem +from reclass.values.valuelist import ValueList +from reclass.values.listitem import ListItem +from reclass.values.dictitem import DictItem +from reclass.values.refitem import RefItem +import unittest +from mock import MagicMock + +SETTINGS = Settings() + +class TestRefItem(unittest.TestCase): + + def test_assembleRefs_ok(self): + phonyitem = MagicMock() + phonyitem.render = lambda x, k: 'bar' + phonyitem.has_references = True + phonyitem.get_references = lambda *x: ['foo'] + + iwr = RefItem([phonyitem], {}) + + self.assertEquals(iwr.get_references(), ['foo', 'bar']) + self.assertTrue(iwr.allRefs) + + def test_assembleRefs_failedrefs(self): + phonyitem = MagicMock() + phonyitem.render.side_effect = errors.ResolveError('foo') + phonyitem.has_references = True + phonyitem.get_references = lambda *x: ['foo'] + + iwr = RefItem([phonyitem], {}) + + self.assertEquals(iwr.get_references(), ['foo']) + self.assertFalse(iwr.allRefs) + + def test__resolve_ok(self): + reference = RefItem('', Settings({'delimiter': ':'})) + + result = reference._resolve('foo:bar', {'foo':{'bar': 1}}) + + self.assertEquals(result, 1) + + def test__resolve_fails(self): + refitem = RefItem('', Settings({'delimiter': ':'})) + context = {'foo':{'bar': 1}} + reference = 'foo:baz' + + self.assertRaises(errors.ResolveError, refitem._resolve, reference, + context) + + +if __name__ 
== '__main__': + unittest.main() diff --git a/reclass/values/tests/test_scaitem.py b/reclass/values/tests/test_scaitem.py new file mode 100644 index 00000000..b6d038de --- /dev/null +++ b/reclass/values/tests/test_scaitem.py @@ -0,0 +1,38 @@ +from reclass.settings import Settings +from reclass.values.value import Value +from reclass.values.compitem import CompItem +from reclass.values.scaitem import ScaItem +from reclass.values.valuelist import ValueList +from reclass.values.listitem import ListItem +from reclass.values.dictitem import DictItem +import unittest + +SETTINGS = Settings() + +class TestScaItem(unittest.TestCase): + + def test_merge_over_merge_scalar(self): + scalar1 = ScaItem([1], SETTINGS) + scalar2 = ScaItem([2], SETTINGS) + + result = scalar2.merge_over(scalar1) + + self.assertEquals(result.contents, scalar2.contents) + + def test_merge_over_merge_composite(self): + scalar1 = CompItem(Value(1, SETTINGS, ''), SETTINGS) + scalar2 = ScaItem([2], SETTINGS) + + result = scalar2.merge_over(scalar1) + + self.assertEquals(result.contents, scalar2.contents) + + def test_merge_other_types_not_allowed(self): + other = type('Other', (object,), {'type': 34}) + val1 = Value(None, SETTINGS, '') + scalar = ScaItem(val1, SETTINGS) + + self.assertRaises(RuntimeError, scalar.merge_over, other) + +if __name__ == '__main__': + unittest.main() diff --git a/reclass/utils/tests/test_refvalue.py b/reclass/values/tests/test_value.py similarity index 55% rename from reclass/utils/tests/test_refvalue.py rename to reclass/values/tests/test_value.py index 23d7e7b0..a06d2207 100644 --- a/reclass/utils/tests/test_refvalue.py +++ b/reclass/values/tests/test_value.py @@ -6,17 +6,21 @@ # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # - -from reclass.utils.refvalue import RefValue -from reclass.defaults import PARAMETER_INTERPOLATION_SENTINELS, \ - PARAMETER_INTERPOLATION_DELIMITER -from reclass.errors import UndefinedVariableError, \ - IncompleteInterpolationError +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from reclass.settings import Settings +from reclass.values.value import Value +from reclass.errors import ResolveError, ParseError import unittest +SETTINGS = Settings() + def _var(s): - return '%s%s%s' % (PARAMETER_INTERPOLATION_SENTINELS[0], s, - PARAMETER_INTERPOLATION_SENTINELS[1]) + return '%s%s%s' % (SETTINGS.reference_sentinels[0], s, + SETTINGS.reference_sentinels[1]) CONTEXT = {'favcolour':'yellow', 'motd':{'greeting':'Servus!', @@ -31,19 +35,19 @@ def _var(s): def _poor_mans_template(s, var, value): return s.replace(_var(var), value) -class TestRefValue(unittest.TestCase): +class TestValue(unittest.TestCase): def test_simple_string(self): s = 'my cat likes to hide in boxes' - tv = RefValue(s) - self.assertFalse(tv.has_references()) - self.assertEquals(tv.render(CONTEXT), s) + tv = Value(s, SETTINGS, '') + self.assertFalse(tv.has_references) + self.assertEquals(tv.render(CONTEXT, None), s) def _test_solo_ref(self, key): s = _var(key) - tv = RefValue(s) - res = tv.render(CONTEXT) - self.assertTrue(tv.has_references()) + tv = Value(s, SETTINGS, '') + res = tv.render(CONTEXT, None) + self.assertTrue(tv.has_references) self.assertEqual(res, CONTEXT[key]) def test_solo_ref_string(self): @@ -63,65 +67,65 @@ def test_solo_ref_bool(self): def test_single_subst_bothends(self): s = 'I like ' + _var('favcolour') + ' and I like it' - tv = RefValue(s) - self.assertTrue(tv.has_references()) - self.assertEqual(tv.render(CONTEXT), + tv = Value(s, SETTINGS, '') + self.assertTrue(tv.has_references) + 
self.assertEqual(tv.render(CONTEXT, None), _poor_mans_template(s, 'favcolour', CONTEXT['favcolour'])) def test_single_subst_start(self): s = _var('favcolour') + ' is my favourite colour' - tv = RefValue(s) - self.assertTrue(tv.has_references()) - self.assertEqual(tv.render(CONTEXT), + tv = Value(s, SETTINGS, '') + self.assertTrue(tv.has_references) + self.assertEqual(tv.render(CONTEXT, None), _poor_mans_template(s, 'favcolour', CONTEXT['favcolour'])) def test_single_subst_end(self): s = 'I like ' + _var('favcolour') - tv = RefValue(s) - self.assertTrue(tv.has_references()) - self.assertEqual(tv.render(CONTEXT), + tv = Value(s, SETTINGS, '') + self.assertTrue(tv.has_references) + self.assertEqual(tv.render(CONTEXT, None), _poor_mans_template(s, 'favcolour', CONTEXT['favcolour'])) def test_deep_subst_solo(self): - var = PARAMETER_INTERPOLATION_DELIMITER.join(('motd', 'greeting')) - s = _var(var) - tv = RefValue(s) - self.assertTrue(tv.has_references()) - self.assertEqual(tv.render(CONTEXT), - _poor_mans_template(s, var, + motd = SETTINGS.delimiter.join(('motd', 'greeting')) + s = _var(motd) + tv = Value(s, SETTINGS, '') + self.assertTrue(tv.has_references) + self.assertEqual(tv.render(CONTEXT, None), + _poor_mans_template(s, motd, CONTEXT['motd']['greeting'])) def test_multiple_subst(self): - greet = PARAMETER_INTERPOLATION_DELIMITER.join(('motd', 'greeting')) + greet = SETTINGS.delimiter.join(('motd', 'greeting')) s = _var(greet) + ' I like ' + _var('favcolour') + '!' 
- tv = RefValue(s) - self.assertTrue(tv.has_references()) + tv = Value(s, SETTINGS, '') + self.assertTrue(tv.has_references) want = _poor_mans_template(s, greet, CONTEXT['motd']['greeting']) want = _poor_mans_template(want, 'favcolour', CONTEXT['favcolour']) - self.assertEqual(tv.render(CONTEXT), want) + self.assertEqual(tv.render(CONTEXT, None), want) def test_multiple_subst_flush(self): - greet = PARAMETER_INTERPOLATION_DELIMITER.join(('motd', 'greeting')) + greet = SETTINGS.delimiter.join(('motd', 'greeting')) s = _var(greet) + ' I like ' + _var('favcolour') - tv = RefValue(s) - self.assertTrue(tv.has_references()) + tv = Value(s, SETTINGS, '') + self.assertTrue(tv.has_references) want = _poor_mans_template(s, greet, CONTEXT['motd']['greeting']) want = _poor_mans_template(want, 'favcolour', CONTEXT['favcolour']) - self.assertEqual(tv.render(CONTEXT), want) + self.assertEqual(tv.render(CONTEXT, None), want) def test_undefined_variable(self): s = _var('no_such_variable') - tv = RefValue(s) - with self.assertRaises(UndefinedVariableError): - tv.render(CONTEXT) + tv = Value(s, SETTINGS, '') + with self.assertRaises(ResolveError): + tv.render(CONTEXT, None) def test_incomplete_variable(self): - s = PARAMETER_INTERPOLATION_SENTINELS[0] + 'incomplete' - with self.assertRaises(IncompleteInterpolationError): - tv = RefValue(s) + s = SETTINGS.reference_sentinels[0] + 'incomplete' + with self.assertRaises(ParseError): + tv = Value(s, SETTINGS, '') if __name__ == '__main__': unittest.main() diff --git a/reclass/values/value.py b/reclass/values/value.py new file mode 100644 index 00000000..451617ec --- /dev/null +++ b/reclass/values/value.py @@ -0,0 +1,107 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass +# +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from .parser import Parser +from .dictitem import DictItem +from .listitem import ListItem +from 
.scaitem import ScaItem +from reclass.errors import InterpolationError + +from six import string_types + +class Value(object): + + _parser = Parser() + + def __init__(self, value, settings, uri, parse_string=True): + self._settings = settings + self.uri = uri + self.overwrite = False + self.constant = False + if isinstance(value, string_types): + if parse_string: + try: + self._item = self._parser.parse(value, self._settings) + except InterpolationError as e: + e.uri = self.uri + raise + else: + self._item = ScaItem(value, self._settings) + elif isinstance(value, list): + self._item = ListItem(value, self._settings) + elif isinstance(value, dict): + self._item = DictItem(value, self._settings) + else: + self._item = ScaItem(value, self._settings) + + def item_type(self): + return self._item.type + + def item_type_str(self): + return self._item.type_str() + + def is_container(self): + return self._item.is_container() + + @property + def allRefs(self): + return self._item.allRefs + + @property + def has_references(self): + return self._item.has_references + + @property + def has_inv_query(self): + return self._item.has_inv_query + + @property + def needs_all_envs(self): + if self._item.has_inv_query: + return self._item.needs_all_envs + return False + + def ignore_failed_render(self): + return self._item.ignore_failed_render + + @property + def is_complex(self): + return self._item.is_complex + + def get_references(self): + return self._item.get_references() + + def get_inv_references(self): + return self._item.get_inv_references() + + def assembleRefs(self, context): + if self._item.has_references: + self._item.assembleRefs(context) + + def render(self, context, inventory): + try: + return self._item.render(context, inventory) + except InterpolationError as e: + e.uri = self.uri + raise + + @property + def contents(self): + return self._item.contents + + def merge_over(self, value): + self._item = self._item.merge_over(value._item) + return self + + def 
__repr__(self): + return 'Value(%r)' % self._item + + def __str__(self): + return str(self._item) diff --git a/reclass/values/valuelist.py b/reclass/values/valuelist.py new file mode 100644 index 00000000..e8c3a0ce --- /dev/null +++ b/reclass/values/valuelist.py @@ -0,0 +1,186 @@ +# +# -*- coding: utf-8 -*- +# +# This file is part of reclass +# +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import copy +import sys + +from reclass.errors import ChangedConstantError, ResolveError, TypeMergeError + + +class ValueList(object): + + def __init__(self, value, settings): + self._settings = settings + self._refs = [] + self.allRefs = True + self._values = [value] + self._inv_refs = [] + self.has_inv_query = False + self.ignore_failed_render = False + self.is_complex = False + self._update() + + @property + def uri(self): + return '; '.join([str(x.uri) for x in self._values]) + + def append(self, value): + self._values.append(value) + self._update() + + def extend(self, values): + self._values.extend(values._values) + self._update() + + def _update(self): + self.assembleRefs() + self._check_for_inv_query() + self.is_complex = False + item_type = self._values[0].item_type() + for v in self._values: + if v.is_complex or v.constant or v.overwrite or v.item_type() != item_type: + self.is_complex = True + + @property + def has_references(self): + return len(self._refs) > 0 + + def get_inv_references(self): + return self._inv_refs + + def get_references(self): + return self._refs + + def _check_for_inv_query(self): + self.has_inv_query = False + self.ignore_failed_render = True + for value in self._values: + if value.has_inv_query: + self._inv_refs.extend(value.get_inv_references()) + self.has_inv_query = True + if value.ignore_failed_render() is False: + self.ignore_failed_render = False + if self.has_inv_query is False: + self.ignore_failed_render = False + + def 
assembleRefs(self, context={}): + self._refs = [] + self.allRefs = True + for value in self._values: + value.assembleRefs(context) + if value.has_references: + self._refs.extend(value.get_references()) + if value.allRefs is False: + self.allRefs = False + + @property + def needs_all_envs(self): + for value in self._values: + if value.needs_all_envs: + return True + return False + + def merge(self): + output = None + for n, value in enumerate(self._values): + if output is None: + output = value + else: + output = value.merge_over(output) + return output + + def render(self, context, inventory): + from reclass.datatypes.parameters import Parameters + + output = None + deepCopied = False + last_error = None + constant = False + for n, value in enumerate(self._values): + try: + new = value.render(context, inventory) + except ResolveError as e: + # only ignore failed renders if + # ignore_overwritten_missing_references is set and we are + # dealing with a scalar value and it's not the last item in the + # values list + if (self._settings.ignore_overwritten_missing_references + and not isinstance(output, (dict, list)) + and n != (len(self._values)-1)): + new = None + last_error = e + print("[WARNING] Reference '%s' undefined" % str(value), + file=sys.stderr) + else: + raise e + + if constant: + if self._settings.strict_constant_parameters: + raise ChangedConstantError('{0}; {1}'.format(self._values[n-1].uri, self._values[n].uri)) + else: + continue + + if output is None or value.overwrite: + output = new + deepCopied = False + else: + if isinstance(output, dict): + if isinstance(new, dict): + p1 = Parameters(output, self._settings, None, parse_strings=False) + p2 = Parameters(new, self._settings, None, parse_strings=False) + p1.merge(p2) + output = p1.as_dict() + elif isinstance(new, list): + raise TypeMergeError(self._values[n], self._values[n-1], self.uri) + elif self._settings.allow_scalar_over_dict or (self._settings.allow_none_override and new is None): + output = 
new + deepCopied = False + else: + raise TypeMergeError(self._values[n], self._values[n-1], self.uri) + elif isinstance(output, list): + if isinstance(new, list): + if not deepCopied: + output = copy.deepcopy(output) + deepCopied = True + output.extend(new) + elif isinstance(new, dict): + raise TypeMergeError(self._values[n], self._values[n-1], self.uri) + elif self._settings.allow_scalar_over_list or (self._settings.allow_none_override and new is None): + output = new + deepCopied = False + else: + raise TypeMergeError(self._values[n], self._values[n-1], self.uri) + else: + if isinstance(new, dict): + if self._settings.allow_dict_over_scalar: + output = new + deepCopied = False + else: + raise TypeMergeError(self._values[n], self._values[n-1], self.uri) + elif isinstance(new, list): + if self._settings.allow_list_over_scalar: + output_list = list() + output_list.append(output) + output_list.extend(new) + output = output_list + deepCopied = True + else: + raise TypeMergeError(self._values[n], self._values[n-1], self.uri) + else: + output = new + deepCopied = False + + if value.constant: + constant = True + + if isinstance(output, (dict, list)) and last_error is not None: + raise last_error + + return output diff --git a/reclass/version.py b/reclass/version.py index a2aa99a7..5a40c2ed 100644 --- a/reclass/version.py +++ b/reclass/version.py @@ -6,13 +6,20 @@ # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + RECLASS_NAME = 'reclass' -DESCRIPTION = 'merge data by recursive descent down an ancestry hierarchy' -VERSION = '1.4.1' -AUTHOR = 'martin f. 
krafft' -AUTHOR_EMAIL = 'reclass@pobox.madduck.net' -MAINTAINER = 'Jason Ritzke (@Rtzq0)' -MAINTAINER_EMAIL = 'jasonritzke@4loopz.com' -COPYRIGHT = 'Copyright © 2007–14 ' + AUTHOR +DESCRIPTION = ('merge data by recursive descent down an ancestry hierarchy ' + '(forked extended version)') +VERSION = '1.6.0' +AUTHOR = 'martin f. krafft / Andrew Pickford / salt-formulas community' +AUTHOR_EMAIL = 'salt-formulas@freelists.org' +MAINTAINER = 'salt-formulas community' +MAINTAINER_EMAIL = 'salt-formulas@freelists.org' +COPYRIGHT = ('Copyright © 2007–14 martin f. krafft, extensions © 2017 Andrew' + ' Pickford, extensions © salt-formulas community') LICENCE = 'Artistic Licence 2.0' -URL = 'https://github.com/madduck/reclass' +URL = 'https://github.com/salt-formulas/reclass' diff --git a/releasenotes/config.yaml b/releasenotes/config.yaml new file mode 100644 index 00000000..6a6923d9 --- /dev/null +++ b/releasenotes/config.yaml @@ -0,0 +1,66 @@ +--- +# Usage: +# +# reno -qd .releasenotes list +# reno -qd .releasenotes new slug-title --edit +# reno -qd .releasenotes report --no-show-source + +# Change prelude_section_name to 'summary' from default value prelude +prelude_section_name: summary +show_source: False +sections: + - [summary, Summary] + - [features, New features] + - [fixes, Bug fixes] + - [others, Other notes] +template: | + --- + # Author the following sections or remove the section if it is not related. + # Use one release note per a feature. + # + # If you miss a section from the list below, please first submit a review + # adding it to .releasenotes/config.yaml. + # + # Format content with reStructuredText (RST). + # **Formatting examples:** + # - | + # This is a brief description of the feature. It may include a + # number of components: + # + # * List item 1 + # * List item 2. + # This code block below will appear as part of the list item 2: + # + # .. 
code-block:: yaml + # + # classes: + # - system.class.to.load + # + # The code block below will appear on the same level as the feature + # description: + # + # .. code-block:: text + # + # provide model/formula pillar snippets + + + summary: > + This section is not mandatory. Use it to highlight the change. + + features: + - Use the list to record summary of **NEW** features + - Provide detailed description of the feature indicating the use cases + when users benefit from using it + - Provide steps to deploy the feature (if the procedure is complicated + indicate during what stage of the deployment workflow it should be + deployed). + - Provide troubleshooting information, if any. + + fixes: + - Use the list to record summary of a bug fix for blocker, critical. + - Provide a brief summary of what has been fixed. + + others: + - Author any additional notes. Use this section if note is not related to + any of the common sections above. + diff --git a/releasenotes/notes/escaping-references-e76699d8ca010013.yaml b/releasenotes/notes/escaping-references-e76699d8ca010013.yaml new file mode 100644 index 00000000..41845ee8 --- /dev/null +++ b/releasenotes/notes/escaping-references-e76699d8ca010013.yaml @@ -0,0 +1,3 @@ +--- +others: + - The escaping of references changes how the constructs '\${xxx}' and '\\${xxx}' are rendered. diff --git a/requirements.txt b/requirements.txt index c3726e8b..5f6aed18 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,5 @@ +pyparsing pyyaml +six +enum34 +ddt diff --git a/run_tests.py b/run_tests.py index 1506945f..aeccb577 100755 --- a/run_tests.py +++ b/run_tests.py @@ -6,6 +6,10 @@ # Copyright © 2007–13 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals import unittest tests = unittest.TestLoader().discover('reclass') diff --git a/setup.cfg b/setup.cfg index d645be77..c5cfe6b2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,3 +3,4 @@ # 3. If at all possible, it is good practice to do this. If you cannot, you # will need to generate wheels for each Python version that you support. universal=0 + diff --git a/setup.py b/setup.py index 3830b842..ab23207f 100644 --- a/setup.py +++ b/setup.py @@ -6,6 +6,10 @@ # Copyright © 2007–13 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals from reclass.version import * from setuptools import setup, find_packages @@ -38,7 +42,7 @@ url = URL, packages = find_packages(exclude=['*tests']), #FIXME validate this entry_points = { 'console_scripts': console_scripts }, - install_requires = ['pyyaml'], + install_requires = ['pyparsing', 'pyyaml', 'six', 'enum34', 'ddt'], #FIXME pygit2 (require libffi-dev, libgit2-dev 0.26.x ) classifiers=[ 'Development Status :: 4 - Beta', diff --git a/test/model/default/classes/first.yml b/test/model/default/classes/first.yml new file mode 100644 index 00000000..9b72a263 --- /dev/null +++ b/test/model/default/classes/first.yml @@ -0,0 +1,32 @@ +parameters: + _param: + some: param + colour: red + lab: + name: test + label: first + colour: + escaped: \${_param:colour} + doubleescaped: \\${_param:colour} + unescaped: ${_param:colour} + colours: + red: + name: red + blue: + name: blue + one: + a: 1 + b: 2 + two: + c: 3 + d: 4 + three: + e: 5 + list_to_override: + - one + - two + dict_to_override: + one: 1 + two: 2 + + diff --git a/test/model/default/classes/lab/env/dev.yml 
b/test/model/default/classes/lab/env/dev.yml new file mode 100644 index 00000000..0cce363c --- /dev/null +++ b/test/model/default/classes/lab/env/dev.yml @@ -0,0 +1,4 @@ + +parameters: + lab: + name: dev diff --git a/test/model/default/classes/second.yml b/test/model/default/classes/second.yml new file mode 100644 index 00000000..dab50c7e --- /dev/null +++ b/test/model/default/classes/second.yml @@ -0,0 +1,9 @@ +classes: +- first + +parameters: + will: + warn: + at: + second: ${_param:notfound} + three: ${one} diff --git a/test/model/default/classes/third.yml b/test/model/default/classes/third.yml new file mode 100644 index 00000000..81fd9790 --- /dev/null +++ b/test/model/default/classes/third.yml @@ -0,0 +1,24 @@ +classes: +- second + +parameters: + _param: + notfound: exist + myparam: ${_param:some} + will: + not: + fail: + at: + tree: ${_param:notfound} + 1: + an_numeric_key: true + as_a_dict: 1 + 2: + - as_a_list + 3: value + three: ${two} + empty: + list: [] + dict: {} + ~list_to_override: ${empty:list} + ~dict_to_override: ${empty:dict} diff --git a/test/model/default/nodes/reclass.yml b/test/model/default/nodes/reclass.yml new file mode 100644 index 00000000..94b75197 --- /dev/null +++ b/test/model/default/nodes/reclass.yml @@ -0,0 +1,3 @@ + +classes: +- third diff --git a/test/model/default/reclass-config.yml b/test/model/default/reclass-config.yml new file mode 100644 index 00000000..9d8f30fa --- /dev/null +++ b/test/model/default/reclass-config.yml @@ -0,0 +1 @@ +storage_type: yaml_fs diff --git a/test/model/extensions/classes/defaults.yml b/test/model/extensions/classes/defaults.yml new file mode 100644 index 00000000..5d17c2be --- /dev/null +++ b/test/model/extensions/classes/defaults.yml @@ -0,0 +1,4 @@ + +parameters: + config: + defaults: True diff --git a/test/model/extensions/classes/first.yml b/test/model/extensions/classes/first.yml new file mode 100644 index 00000000..96ece276 --- /dev/null +++ b/test/model/extensions/classes/first.yml @@ -0,0 
+1,6 @@ +parameters: + _param: + some: param + lab: + name: test + label: first diff --git a/test/model/extensions/classes/lab/env/dev.yml b/test/model/extensions/classes/lab/env/dev.yml new file mode 100644 index 00000000..0cce363c --- /dev/null +++ b/test/model/extensions/classes/lab/env/dev.yml @@ -0,0 +1,4 @@ + +parameters: + lab: + name: dev diff --git a/test/model/extensions/classes/relative/init.yml b/test/model/extensions/classes/relative/init.yml new file mode 100644 index 00000000..117e4fad --- /dev/null +++ b/test/model/extensions/classes/relative/init.yml @@ -0,0 +1,3 @@ + +classes: + - .nested diff --git a/test/model/extensions/classes/relative/nested/common.yml b/test/model/extensions/classes/relative/nested/common.yml new file mode 100644 index 00000000..28cc0b2b --- /dev/null +++ b/test/model/extensions/classes/relative/nested/common.yml @@ -0,0 +1,5 @@ + +parameters: + nested: + deep: + common: to be overridden diff --git a/test/model/extensions/classes/relative/nested/deep/common.yml b/test/model/extensions/classes/relative/nested/deep/common.yml new file mode 100644 index 00000000..b77a24c4 --- /dev/null +++ b/test/model/extensions/classes/relative/nested/deep/common.yml @@ -0,0 +1,5 @@ + +parameters: + nested: + deep: + common: False diff --git a/test/model/extensions/classes/relative/nested/deep/init.yml b/test/model/extensions/classes/relative/nested/deep/init.yml new file mode 100644 index 00000000..cd12d103 --- /dev/null +++ b/test/model/extensions/classes/relative/nested/deep/init.yml @@ -0,0 +1,9 @@ + +classes: + - .common + +parameters: + nested: + deep: + init: True + common: True diff --git a/test/model/extensions/classes/relative/nested/dive/session.yml b/test/model/extensions/classes/relative/nested/dive/session.yml new file mode 100644 index 00000000..9abd1eea --- /dev/null +++ b/test/model/extensions/classes/relative/nested/dive/session.yml @@ -0,0 +1,5 @@ + +parameters: + nested: + deep: + session: True diff --git
a/test/model/extensions/classes/relative/nested/init.yml b/test/model/extensions/classes/relative/nested/init.yml new file mode 100644 index 00000000..9f023830 --- /dev/null +++ b/test/model/extensions/classes/relative/nested/init.yml @@ -0,0 +1,10 @@ + +classes: + - .common + - .deep + - .dive.session + +parameters: + nested: + deep: + init: True diff --git a/test/model/extensions/classes/second.yml b/test/model/extensions/classes/second.yml new file mode 100644 index 00000000..929d7465 --- /dev/null +++ b/test/model/extensions/classes/second.yml @@ -0,0 +1,9 @@ +classes: +- first +- relative + +parameters: + will: + warn: + at: + second: ${_param:notfound} diff --git a/test/model/extensions/classes/third.yml b/test/model/extensions/classes/third.yml new file mode 100644 index 00000000..a5157cf6 --- /dev/null +++ b/test/model/extensions/classes/third.yml @@ -0,0 +1,14 @@ +classes: +- missing.class +- second +- .defaults + +parameters: + _param: + notfound: exist + myparam: ${_param:some} + will: + not: + fail: + at: + tree: ${_param:notfound} diff --git a/test/model/extensions/nodes/reclass.yml b/test/model/extensions/nodes/reclass.yml new file mode 100644 index 00000000..5d5b3ec4 --- /dev/null +++ b/test/model/extensions/nodes/reclass.yml @@ -0,0 +1,3 @@ + +classes: +- .third diff --git a/test/model/extensions/reclass-config.yml b/test/model/extensions/reclass-config.yml new file mode 100644 index 00000000..6e2f1010 --- /dev/null +++ b/test/model/extensions/reclass-config.yml @@ -0,0 +1,3 @@ +storage_type: yaml_fs +ignore_class_notfound: True +ignore_class_regexp: ['.*']