From e3bfefd10c33410bd76665eb3995fcf88de8fa6c Mon Sep 17 00:00:00 2001 From: Python3pkg Date: Wed, 17 May 2017 23:36:38 -0700 Subject: [PATCH 1/3] Convert to python3 --- carousel/__init__.py | 8 +- carousel/__init__.py.bak | 48 ++ carousel/contrib/readers.py | 6 +- carousel/contrib/readers.py.bak | 158 ++++ carousel/contrib/tests/test_data_readers.py | 24 +- .../contrib/tests/test_data_readers.py.bak | 345 ++++++++ carousel/core/__init__.py | 30 +- carousel/core/__init__.py.bak | 374 ++++++++ carousel/core/calculations.py | 9 +- carousel/core/calculations.py.bak | 120 +++ carousel/core/calculators.py | 20 +- carousel/core/calculators.py.bak | 258 ++++++ carousel/core/data_readers.py | 32 +- carousel/core/data_readers.py.bak | 805 ++++++++++++++++++ carousel/core/data_sources.py | 23 +- carousel/core/data_sources.py.bak | 303 +++++++ carousel/core/formulas.py | 13 +- carousel/core/formulas.py.bak | 300 +++++++ carousel/core/layers.py | 16 +- carousel/core/layers.py.bak | 408 +++++++++ carousel/core/models.py | 21 +- carousel/core/models.py.bak | 344 ++++++++ carousel/core/outputs.py | 7 +- carousel/core/outputs.py.bak | 141 +++ carousel/core/simulations.py | 41 +- carousel/core/simulations.py.bak | 555 ++++++++++++ carousel/docs/conf.py | 10 +- carousel/docs/conf.py.bak | 314 +++++++ carousel/tests/test_calcs.py | 4 +- carousel/tests/test_calcs.py.bak | 124 +++ carousel/tests/test_data.py | 2 +- carousel/tests/test_data.py.bak | 181 ++++ carousel/tests/test_formulas.py | 2 +- carousel/tests/test_formulas.py.bak | 95 +++ carousel/tests/test_outputs.py | 2 +- carousel/tests/test_outputs.py.bak | 40 + carousel/tests/test_sim.py | 2 +- carousel/tests/test_sim.py.bak | 141 +++ examples/PVPower/formulas/utils.py | 4 +- examples/PVPower/formulas/utils.py.bak | 100 +++ .../pvpower/sandia_perfmod_newstyle.py | 2 +- .../pvpower/sandia_perfmod_newstyle.py.bak | 509 +++++++++++ .../pvpower/sandia_performance_model.py | 2 +- .../pvpower/sandia_performance_model.py.bak | 167 ++++ .../PVPower/pvpower/tests/test_pvpower.py | 2 +- .../PVPower/pvpower/tests/test_pvpower.py.bak | 114 +++ 46 files changed, 6082 insertions(+), 144 deletions(-) create mode 100644 carousel/__init__.py.bak create mode 100644 carousel/contrib/readers.py.bak create mode 100644 carousel/contrib/tests/test_data_readers.py.bak create mode 100644 carousel/core/__init__.py.bak create mode 100644 carousel/core/calculations.py.bak create mode 100644 carousel/core/calculators.py.bak create mode 100644 carousel/core/data_readers.py.bak create mode 100644 carousel/core/data_sources.py.bak create mode 100644 carousel/core/formulas.py.bak create mode 100644 carousel/core/layers.py.bak create mode 100644 carousel/core/models.py.bak create mode 100644 carousel/core/outputs.py.bak create mode 100644 carousel/core/simulations.py.bak create mode 100644 carousel/docs/conf.py.bak create mode 100644 carousel/tests/test_calcs.py.bak create mode 100644 carousel/tests/test_data.py.bak create mode 100644 carousel/tests/test_formulas.py.bak create mode 100644 carousel/tests/test_outputs.py.bak create mode 100644 carousel/tests/test_sim.py.bak create mode 100644 examples/PVPower/formulas/utils.py.bak create mode 100644 examples/PVPower/pvpower/sandia_perfmod_newstyle.py.bak create mode 100644 examples/PVPower/pvpower/sandia_performance_model.py.bak create mode 100644 examples/PVPower/pvpower/tests/test_pvpower.py.bak diff --git a/carousel/__init__.py b/carousel/__init__.py index 8afd247..467554c 100644 --- a/carousel/__init__.py +++ b/carousel/__init__.py @@ 
-41,8 +41,8 @@ def get_current_version(*args, **kwargs): GIT_TAG = VERSION # if Git tag is none use version file VERSION = GIT_TAG # version -__author__ = u'Mark Mikofski' -__email__ = u'mark.mikofski@sunpowercorp.com' -__url__ = u'https://github.com/SunPower/Carousel' +__author__ = 'Mark Mikofski' +__email__ = 'mark.mikofski@sunpowercorp.com' +__url__ = 'https://github.com/SunPower/Carousel' __version__ = VERSION -__release__ = u'Caramel Corn' +__release__ = 'Caramel Corn' diff --git a/carousel/__init__.py.bak b/carousel/__init__.py.bak new file mode 100644 index 0000000..8afd247 --- /dev/null +++ b/carousel/__init__.py.bak @@ -0,0 +1,48 @@ +""" +Carousel Python Model Simulation Framework + +Mark Mikofski (c) 2015 +""" + +import os +import importlib + +# try to import Dulwich or create dummies +try: + from dulwich.contrib.release_robot import get_current_version + from dulwich.repo import NotGitRepository +except ImportError: + NotGitRepository = NotImplementedError + + def get_current_version(*args, **kwargs): + raise NotGitRepository + +# Dulwich Release Robot +BASEDIR = os.path.dirname(__file__) # this directory +PROJDIR = os.path.dirname(BASEDIR) +VER_FILE = 'version' # name of file to store version +# use release robot to try to get current Git tag +try: + GIT_TAG = get_current_version(PROJDIR) +except NotGitRepository: + GIT_TAG = None +# check version file +try: + version = importlib.import_module('%s.%s' % (__name__, VER_FILE)) +except ImportError: + VERSION = None +else: + VERSION = version.VERSION +# update version file if it differs from Git tag +if GIT_TAG is not None and VERSION != GIT_TAG: + with open(os.path.join(BASEDIR, VER_FILE + '.py'), 'w') as vf: + vf.write('VERSION = "%s"\n' % GIT_TAG) +else: + GIT_TAG = VERSION # if Git tag is none use version file +VERSION = GIT_TAG # version + +__author__ = u'Mark Mikofski' +__email__ = u'mark.mikofski@sunpowercorp.com' +__url__ = u'https://github.com/SunPower/Carousel' +__version__ = VERSION +__release__ = u'Caramel Corn' diff --git a/carousel/contrib/readers.py b/carousel/contrib/readers.py index 20c0f1d..2783b62 100644 --- a/carousel/contrib/readers.py +++ b/carousel/contrib/readers.py @@ -66,7 +66,7 @@ def load_data(self, *args, **kwargs): # get positional argument names from parameters and apply them to args # update data with additional kwargs argpos = { - v['extras']['argpos']: k for k, v in self.parameters.iteritems() + v['extras']['argpos']: k for k, v in self.parameters.items() if 'argpos' in v['extras'] } data = dict( @@ -85,7 +85,7 @@ def apply_units_to_cache(self, data): :return: data with units applied """ # if units key exists then apply - for k, v in self.parameters.iteritems(): + for k, v in self.parameters.items(): if v and v.get('units'): data[k] = Q_(data[k], v.get('units')) return data @@ -145,7 +145,7 @@ class HDF5Reader(ArgumentReader): def load_data(self, h5file, *args, **kwargs): with h5py.File(h5file) as h5f: h5data = dict.fromkeys(self.parameters) - for param, attrs in self.parameters.iteritems(): + for param, attrs in self.parameters.items(): LOGGER.debug('parameter:\n%r', param) node = attrs['extras']['node'] # full name of node # composite datatype member diff --git a/carousel/contrib/readers.py.bak b/carousel/contrib/readers.py.bak new file mode 100644 index 0000000..20c0f1d --- /dev/null +++ b/carousel/contrib/readers.py.bak @@ -0,0 +1,158 @@ +""" +Custom data readers including :class:`carousel.contrib.readers.ArgumentReader`, +:class:`carousel.contrib.readers.DjangoModelReader` and 
+:class:`carousel.contrib.readers.HDF5Reader`. +""" + +import numpy as np +import h5py +from carousel.core.data_readers import DataReader +from carousel.core.data_sources import DataParameter +from carousel.core import Q_ +import logging + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.DEBUG) + + +def copy_model_instance(obj): + """ + Copy Django model instance as a dictionary excluding automatically created + fields like an auto-generated sequence as a primary key or an auto-created + many-to-one reverse relation. + + :param obj: Django model object + :return: copy of model instance as dictionary + """ + meta = getattr(obj, '_meta') # make pycharm happy + # dictionary of model values excluding auto created and related fields + return {f.name: getattr(obj, f.name) + for f in meta.get_fields(include_parents=False) + if not f.auto_created} + + +# TODO: make parameters consistent for all readers +# TODO: parameters set by attributes in data source model fields +# EG: ghi = FloatField('GHI', units='W/m**2') +# EG: solar_azimuth = FloatField('solar azimuth', units='degrees') +# TODO: some parameters set in class Meta +# EG: class Meta: args = ['GHI', 'azimuth'] + +class ArgumentReader(DataReader): + """ + Read arguments passed directly to a simulation. + + The argument parameters dictionary should have two keys: `args` and `kwargs` + which consist of the names and attributes of the positional and keyword + arguments respectively. For example:: + + { + 'GHI': {'units': 'W/m**2', 'isconstant': False, 'argpos': 0}, + 'azimuth': {'units': 'degrees', 'isconstant': False, 'argpos': 1}, + 'DNI': {'units': 'W/m**2', 'isconstant': False}, + 'zenith': {'units': 'degrees', 'isconstant': False} + } + + """ + #: True if reader accepts ``filename`` argument + is_file_reader = False # not a file reader + + def load_data(self, *args, **kwargs): + """ + Collects positional and keyword arguments into `data` and applies units. + + :return: data + """ + # get positional argument names from parameters and apply them to args + # update data with additional kwargs + argpos = { + v['extras']['argpos']: k for k, v in self.parameters.iteritems() + if 'argpos' in v['extras'] + } + data = dict( + {argpos[n]: a for n, a in enumerate(args)}, **kwargs + ) + return self.apply_units_to_cache(data) + + def apply_units_to_cache(self, data): + """ + Applies units to data when a proxy reader is used. For example if the + data is cached as JSON and retrieved using the + :class:`~carousel.core.data_readers.JSONReader`, then units can be + applied from the original parameter schema. + + :param data: Data read by proxy reader. + :return: data with units applied + """ + # if units key exists then apply + for k, v in self.parameters.iteritems(): + if v and v.get('units'): + data[k] = Q_(data[k], v.get('units')) + return data + + +class DjangoModelReader(ArgumentReader): + """ + Reads arguments that are Django objects or lists of objects. 
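+
+    A minimal sketch of a data source wired to this reader (``Weather`` is a
+    hypothetical Django model with ``ghi`` and ``dni`` fields)::
+
+        class WeatherData(DataSource):
+            data_reader = DjangoModelReader
+
+            class Meta:
+                model = Weather
+                fields = ['ghi', 'dni']
+
+    Fields can be limited with ``fields`` or skipped with ``exclude``.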
+ """ + def __init__(self, parameters=None, meta=None): + #: Django model + self.model = meta.model + model_meta = getattr(self.model, '_meta') # make pycharm happy + # model fields excluding AutoFields and related fields like one-to-many + all_model_fields = [ + f for f in model_meta.get_fields(include_parents=False) + if not f.auto_created + ] + all_field_names = [f.name for f in all_model_fields] # field names + # use all fields if no parameters given + if parameters is None: + parameters = DataParameter.fromkeys( + all_field_names, {} + ) + fields = getattr(meta, 'fields', all_field_names) # specified fields + LOGGER.debug('fields:\n%r', fields) + exclude = getattr(meta, 'exclude', []) # specifically excluded fields + for f in all_model_fields: + # skip any fields not specified in data source + if f.name not in fields or f.name in exclude: + LOGGER.debug('skipping %s', f.name) + continue + # add field to parameters or update parameters with field type + param_dict = {'ftype': f.get_internal_type()} + if f.name in parameters: + parameters[f.name]['extras'].update(param_dict) + else: + parameters[f.name] = DataParameter(**param_dict) + super(DjangoModelReader, self).__init__(parameters, meta) + + def load_data(self, model_instance, *args, **kwargs): + """ + Apply units to model. + :return: data + """ + model_dict = copy_model_instance(model_instance) + return super(DjangoModelReader, self).load_data(**model_dict) + + +class HDF5Reader(ArgumentReader): + """ + Reads data from an HDF5 file + """ + #: True if reader accepts ``filename`` argument + is_file_reader = True # is a file reader + + def load_data(self, h5file, *args, **kwargs): + with h5py.File(h5file) as h5f: + h5data = dict.fromkeys(self.parameters) + for param, attrs in self.parameters.iteritems(): + LOGGER.debug('parameter:\n%r', param) + node = attrs['extras']['node'] # full name of node + # composite datatype member + member = attrs['extras'].get('member') + if member is not None: + # if node is a table then get column/field/description + h5data[param] = np.asarray(h5f[node][member]) # copy member + else: + h5data[param] = np.asarray(h5f[node]) # copy array + return super(HDF5Reader, self).load_data(**h5data) diff --git a/carousel/contrib/tests/test_data_readers.py b/carousel/contrib/tests/test_data_readers.py index 86f6dec..3a9e590 100644 --- a/carousel/contrib/tests/test_data_readers.py +++ b/carousel/contrib/tests/test_data_readers.py @@ -34,18 +34,18 @@ ('DirectNormalRadiation', ' %(asctime)s %(funcName)s:%(lineno)d\n> ' + + '\n'.join(logging.BASIC_FORMAT.rsplit(':', 1))) +logging.basicConfig(datefmt=LOG_DATEFMT, format=LOG_FORMAT) +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.DEBUG) + +# unit registry, quantity constructor and extra units registry definitions +UREG = pint.UnitRegistry() # registry of units +Q_ = UREG.Quantity # quantity constructor for ambiguous quantities like degC +UREG.define('fraction = []') # define new dimensionless base unit for percents +UREG.define('percent = fraction / 100.0 = pct') # can't use "%" only ascii +UREG.define('suns = []') # dimensionless unit equivalent to 1000.0 [W/m/m] + +# define PV solar context +_PV = pint.Context('pv') +# define transformation of suns to power flux and vice versa +E0 = 1000.0 * UREG.W / UREG.m / UREG.m # 1 sun +_PV.add_transformation('[]', '[power] / [area]', lambda ureg, x: x * E0) +_PV.add_transformation('[power] / [area]', '[]', lambda ureg, x: x / E0) +UREG.add_context(_PV) + + +def _listify(x): + """ + If x is not a list, make it a list. 
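+
+    A quick sketch of the contract::
+
+        _listify('a')     # -> ['a']
+        _listify(['a'])   # -> ['a']
+        _listify(('a',))  # -> ['a']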
+    """
+    return list(x) if isinstance(x, (list, tuple)) else [x]
+
+
+class Registry(dict):
+    """
+    Base class for a registry.
+
+    The register method can be used to add new keys to the registry only if
+    they don't already exist. A registry can also have meta data associated
+    with subsets of the registered keys. To enforce rules on meta when the keys
+    are registered, override the register method and raise exceptions before
+    calling the :func:`super` built-in function.
+
+    By default there are no meta attributes, only the register method.
+    To set meta attributes, set the ``meta_names`` class attribute in a
+    subclass::
+
+        class MyRegistry(Registry):
+            meta_names = ['meta1', 'meta2', ...]
+
+    The ``Registry`` superclass will check that the meta names are not already
+    attributes and then set instance attributes as empty dictionaries in the
+    subclass. To document them, use the class docstring or the API
+    documentation.
+    """
+    meta_names = []
+
+    def __init__(self):
+        self.meta_names = _listify(self.meta_names)  # convert to list
+        for m in self.meta_names:
+            # check for m in cls and bases
+            if m in dir(Registry):
+                msg = ('Class %s already has %s member.' %
+                       (self.__class__.__name__, m))
+                raise AttributeError(msg)
+            setattr(self, m, {})  # create instance attribute and set to dict()
+        super(Registry, self).__init__()
+
+    def register(self, newitems, *args, **kwargs):
+        """
+        Register newitems in registry.
+
+        :param newitems: New items to add to registry. When registering new
+            items, keys are not allowed to override existing keys in the
+            registry.
+        :type newitems: mapping
+        :param args: Positional arguments with meta data corresponding to order
+            of meta names class attributes
+        :param kwargs: Maps of corresponding meta for new keys. Each set of
+            meta keys must be a subset of the new item keys.
+        :raises:
+            :exc:`~carousel.core.exceptions.DuplicateRegItemError`,
+            :exc:`~carousel.core.exceptions.MismatchRegMetaKeysError`
+        """
+        newkeys = newitems.viewkeys()  # set of the new item keys
+        if any(self.viewkeys() & newkeys):  # duplicates
+            raise DuplicateRegItemError(self.viewkeys() & newkeys)
+        self.update(newitems)  # register new item
+        # update meta fields
+        kwargs.update(zip(self.meta_names, args))
+        for k, v in kwargs.iteritems():
+            meta = getattr(self, k)  # get the meta attribute
+            if v:
+                if not v.viewkeys() <= newkeys:
+                    raise MismatchRegMetaKeysError(newkeys - v.viewkeys())
+                meta.update(v)  # register meta
+
+    def unregister(self, items):
+        """
+        Remove items from registry.
+
+        :param items: items to remove from the registry
+        """
+        items = _listify(items)
+        # get all members of Registry except private, special or class
+        meta_names = (m for m in vars(self).iterkeys()
+                      if (not m.startswith('_') and m not in dir(Registry)))
+        # check that meta names match
+        # FIXME: this is so lame. replace this with something more robust
+        for m in meta_names:
+            if m not in self.meta_names:
+                raise AttributeError('Meta name %s not listed.' % m)
+        # pop items from Registry and from meta
+        for it in items:
+            if it in self:
+                self.pop(it)
+            for m in (getattr(self, m_) for m_ in self.meta_names):
+                if it in m:
+                    m.pop(it)
+
+
+# decorator to use with formulas to convert argument units
+def convert_args(test_fcn, *test_args):
+    """
+    Decorator used in formulas to convert ``test_args`` depending on
+    the ``test_fcn``.
+
+    :param test_fcn: A test function that converts arguments.
+    :type test_fcn: function
+    :param test_args: Names of args to convert using ``test_fcn``.
+ :type test_args: str + + The following test functions are available. + * :func:`dimensionless_to_index` + + Example: Convert ``dawn_idx`` and ``eve_idx`` to indices:: + + @convert_args(dimensionless_to_index, 'dawn_idx', 'eve_idx') + def f_max_T(Tcell24, dawn_idx, eve_idx): + idx = dawn_idx + np.argmax(Tcell24[dawn_idx:eve_idx]) + return Tcell24[idx], idx + """ + def wrapper(origfcn): + @functools.wraps(origfcn) + def newfcn(*args, **kwargs): + argspec = getargspec(origfcn) # use ``inspect`` to get arg names + kwargs.update(zip(argspec.args, args)) # convert args to kw + # loop over test args + for a in test_args: + # convert a if it's in args + if a in argspec.args: + kwargs[a] = test_fcn(kwargs[a]) # update kwargs + # call original function with converted args + return origfcn(**kwargs) + # return wrapped function + return newfcn + # return the wrapper function that consumes the original function + return wrapper + +# NOTE: Preferred way to compare units is with dimensionality +# EG: (25 * UREG.degC).dimensionality == UREG.degC.dimensionality +# XXX: Really? because this works too, seems way better! +# EG: (25 * UREG.degC).units = UREG.degC + + +def dimensionless_to_index(index): + # convert dimensionless to index + if not index.dimensionality: + index = index.magnitude + else: + raise TypeError('Indices must be dimensionless.') + # TODO: make an exception called IndexUnitsError + return index + + +# custom JSON encoder to serialize Quantities and NumPy arrays +class CarouselJSONEncoder(json.JSONEncoder): + def default(self, o): + """ + JSONEncoder default method that converts NumPy arrays and quantities + objects to lists. + """ + if isinstance(o, Q_): + return o.magnitude + elif isinstance(o, np.ndarray): + return o.tolist() + else: + # raise TypeError if not serializable + return super(CarouselJSONEncoder, self).default(o) + + +def get_public_attributes(cls, as_list=True): + """ + Return class attributes that are neither private nor magic. + + :param cls: class + :param as_list: [True] set to False to return generator + :return: only public attributes of class + """ + attrs = (a for a in dir(cls) if not a.startswith('_')) + if as_list: + return list(attrs) + return attrs + + +class CommonBase(type): + """ + Provides common metaclass methods. + + * :meth:`get_parents` ensures initialization only from subclasses of the + main class and not the main class itself + * :meth:`set_param_file_or_parameters` adds class attributes ``param_file`` + or ``parameters`` depending on whether the path and file of the parameters + are given or if the parameters are listed as class attributes. + + Base classes must implement the ``_path_attr`` and ``_file_attr`` as class + attributes:: + + class ExampleBase(CommonBase): + _path_attr = 'outputs_path' # class attribute with parameter path + _file_attr = 'outputs_file' # class attribute with parameter file + """ + _path_attr = NotImplemented + _file_attr = NotImplemented + _param_cls = NotImplemented + # names of inferred objects + _meta_cls = 'Meta' # nested class or dictionary containing class options + _meta_attr = '_meta' # collected meta from classes, bases and files + _param_attr = 'parameters' # parameters collected from classes and files + _param_file = 'param_file' # optional file containing parameters + + @classmethod + def set_meta(mcs, bases, attr): + """ + Get all of the ``Meta`` classes from bases and combine them with this + class. 
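+
+        For example, meta options declared on a nested class are collected
+        here; a hedged sketch (option names come from the subclass)::
+
+            class MyCalcs(Calc):
+                class Meta:
+                    calcs_file = 'my_calcs.json'  # hypothetical file
+                    calcs_path = 'path/to/params'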
+
+        Pops or creates ``Meta`` from attributes, combines all bases and adds
+        ``_meta`` to the attributes with the combined meta.
+
+        :param bases: bases of this class
+        :param attr: class attributes
+        :return: attributes with ``Meta`` class from combined parents
+        """
+        # pop the meta class from the attributes
+        meta = attr.pop(mcs._meta_cls, types.ClassType(mcs._meta_cls, (), {}))
+        # get a list of the meta public class attributes
+        meta_attrs = get_public_attributes(meta)
+        # check all bases for meta
+        for base in bases:
+            base_meta = getattr(base, mcs._meta_cls, None)
+            # skip if base has no meta
+            if base_meta is None:
+                continue
+            # loop over base meta
+            for a in get_public_attributes(base_meta, as_list=False):
+                # skip if already in meta
+                if a in meta_attrs:
+                    continue
+                # copy meta-option attribute from base
+                setattr(meta, a, getattr(base_meta, a))
+        attr[mcs._meta_attr] = meta  # set _meta combined from bases
+        return attr
+
+    @classmethod
+    def set_param_file_or_parameters(mcs, attr):
+        """
+        Set parameters from class attributes that are instances of
+        :class:`~carousel.core.Parameter` or from a parameter file.
+
+        Any class attributes that are instances of
+        :class:`~carousel.core.Parameter` are popped from the class and added
+        to the ``parameters`` attribute, which is a dictionary of the
+        parameters.
+
+        :param attr: class attributes
+        :return: class attributes updated with parameters
+        """
+        meta = attr[mcs._meta_attr]  # look for parameter file path in meta
+        cls_path = getattr(meta, mcs._path_attr, None)
+        cls_file = getattr(meta, mcs._file_attr, None)
+        # read parameters
+        attr[mcs._param_attr] = {}
+        attr[mcs._param_file] = None
+        # read parameters from file
+        if None not in [cls_path, cls_file]:
+            param_file = os.path.join(cls_path, cls_file)
+            attr[mcs._param_file] = param_file
+            # read and load JSON parameter map file as "parameters"
+            with open(param_file, 'r') as param_file:
+                file_params = json.load(param_file)
+            # update meta from file
+            for k, v in file_params.pop(mcs._meta_cls, {}).iteritems():
+                setattr(meta, k, v)
+            # dictionary of parameters for reading source file
+            attr[mcs._param_attr] = {
+                k: mcs._param_cls(**v) for k, v in file_params.iteritems()
+            }
+        # get parameters from class
+        parameters = dict.fromkeys(
+            k for k, v in attr.iteritems() if isinstance(v, Parameter)
+        )
+        # update parameters
+        for k in parameters:
+            attr[mcs._param_attr][k] = attr.pop(k)
+        return attr
+
+    @staticmethod
+    def get_parents(bases, parent):
+        """
+        Ensures that initialization is only performed on subclasses of parent.
+        See
+        https://github.com/django/django/blob/master/django/db/models/base.py
+
+        :param bases: Bases to compare against parent.
+        :type bases: list
+        :param parent: Superclass that bases should be subclassed from.
+        :return: Bases subclassed from parent.
+        :rtype: list
+        """
+        return [b for b in bases if isinstance(b, parent)]
+
+
+class Parameter(dict):
+    _attrs = []
+
+    def __init__(self, *args, **kwargs):
+        items = dict(zip(self._attrs, args))
+        extras = {}
+        for key, val in kwargs.iteritems():
+            if key in self._attrs:
+                items[key] = val
+            else:
+                extras[key] = val
+                LOGGER.warning('This key: "%s" is not an attribute.', key)
+        super(Parameter, self).__init__(items, extras=extras)
+
+    def __repr__(self):
+        fmt = ('<%s(' % self.__class__.__name__)
+        fmt += ', '.join('%s=%r' % (k, v) for k, v in self.iteritems())
+        fmt += ')>'
+        return fmt
diff --git a/carousel/core/calculations.py b/carousel/core/calculations.py
index acda162..ee2268f 100644
--- a/carousel/core/calculations.py
+++ b/carousel/core/calculations.py
@@ -48,9 +48,9 @@ def register(self, new_calc, *args, **kwargs):
 
         :param new_calc: register new calculation
         """
-        kwargs.update(zip(self.meta_names, args))
+        kwargs.update(list(zip(self.meta_names, args)))
         # dependencies should be a list of other calculations
-        if isinstance(kwargs['dependencies'], basestring):
+        if isinstance(kwargs['dependencies'], str):
             kwargs['dependencies'] = [kwargs['dependencies']]
         # call super method, now meta can be passed as args or kwargs.
         super(CalcRegistry, self).register(new_calc, **kwargs)
@@ -76,11 +76,10 @@ def __new__(mcs, name, bases, attr):
         return super(CalcBase, mcs).__new__(mcs, name, bases, attr)
 
 
-class Calc(object):
+class Calc(object, metaclass=CalcBase):
     """
     A class for all calculations.
     """
-    __metaclass__ = CalcBase
 
     def __init__(self):
         meta = getattr(self, CalcBase._meta_attr)
@@ -108,7 +107,7 @@ def __init__(self):
         )
         #: calculations
         self.calcs = {}
-        for k, v in parameters.iteritems():
+        for k, v in parameters.items():
             self.calcs[k] = {
                 key: v[key] for key in ('formula', 'args', 'returns')
             }
diff --git a/carousel/core/calculations.py.bak b/carousel/core/calculations.py.bak
new file mode 100644
index 0000000..acda162
--- /dev/null
+++ b/carousel/core/calculations.py.bak
@@ -0,0 +1,120 @@
+# -*- coding: utf-8 -*-
+
+"""
+This module provides base classes for calculations. All calculations should
+inherit from one of the calcs in this module.
+"""
+
+from carousel.core import logging, CommonBase, Registry, UREG, Parameter
+from carousel.core.calculators import Calculator
+
+LOGGER = logging.getLogger(__name__)
+
+
+class CalcParameter(Parameter):
+    """
+    Fields for calculations.
+    """
+    _attrs = ['dependencies', 'always_calc', 'frequency', 'formula', 'args',
+              'returns', 'calculator', 'is_dynamic']
+
+
+class CalcRegistry(Registry):
+    """
+    A registry for calculations. Each key is a calculation. The value
+    of each calculation is split into 2 dictionaries: "static" and
+    "dynamic". Static calculations occur once at the beginning of a simulation
+    and dynamic calculations occur at every interval. The contents of either
+    the "static" or "dynamic" key is an ordered list of formulas, their
+    arguments and return values.
+
+    Calculations can list ``dependencies`` that must be calculated first.
+    Calculations marked as `always_calc` will not be limited by thresholds
+    set in simulations. The frequency determines how often dynamic
+    calculations occur. Frequency can be given in intervals or as a quantity
+    of time, e.g. ``2 * UREG.hours``.
+    """
+    #: meta names
+    meta_names = ['dependencies', 'always_calc', 'frequency', 'calculator',
+                  'is_dynamic', 'calc_source']
+
+    def register(self, new_calc, *args, **kwargs):
+        """
+        Register calculations and meta data.
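+
+        Meta can be passed positionally, in ``meta_names`` order, or by
+        keyword; each value maps new keys to their meta. A hedged sketch::
+
+            calc_reg = CalcRegistry()
+            calc_reg.register(
+                {'energy': energy_calc},  # hypothetical calculation
+                dependencies={'energy': ['ac_power']},
+                always_calc={'energy': False},
+                frequency={'energy': 1 * UREG.hours},
+            )
+
+        The recognized meta are: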
+ + * ``dependencies`` - list of prerequisite calculations + * ``always_calc`` - ``True`` if calculation ignores thresholds + * ``frequency`` - frequency of calculation in intervals or units of time + + :param new_calc: register new calculation + """ + kwargs.update(zip(self.meta_names, args)) + # dependencies should be a list of other calculations + if isinstance(kwargs['dependencies'], basestring): + kwargs['dependencies'] = [kwargs['dependencies']] + # call super method, now meta can be passed as args or kwargs. + super(CalcRegistry, self).register(new_calc, **kwargs) + + +class CalcBase(CommonBase): + """ + Base calculation meta class. + """ + _path_attr = 'calcs_path' + _file_attr = 'calcs_file' + _param_cls = CalcParameter + + def __new__(mcs, name, bases, attr): + # use only with Calc subclasses + if not CommonBase.get_parents(bases, CalcBase): + return super(CalcBase, mcs).__new__(mcs, name, bases, attr) + # set _meta combined from bases + attr = mcs.set_meta(bases, attr) + # set param file full path if calculations path and file specified or + # try to set parameters from class attributes except private/magic + attr = mcs.set_param_file_or_parameters(attr) + return super(CalcBase, mcs).__new__(mcs, name, bases, attr) + + +class Calc(object): + """ + A class for all calculations. + """ + __metaclass__ = CalcBase + + def __init__(self): + meta = getattr(self, CalcBase._meta_attr) + parameters = getattr(self, CalcBase._param_attr) + #: ``True`` if always calculated (day and night) + self.always_calc = dict.fromkeys( + parameters, getattr(meta, 'always_calc', False) + ) + freq = getattr(meta, 'frequency', [1, '']) + #: frequency calculation is calculated in intervals or units of time + self.frequency = dict.fromkeys(parameters, freq[0] * UREG(str(freq[1]))) + #: dependencies + self.dependencies = dict.fromkeys( + parameters, getattr(meta, 'dependencies', []) + ) + #: name of :class:`Calc` superclass + self.calc_source = dict.fromkeys(parameters, self.__class__.__name__) + #: calculator + self.calculator = dict.fromkeys( + parameters, getattr(meta, 'calculator', Calculator) + ) + #: ``True`` if calculations are dynamic, ``False`` if static + self.is_dynamic = dict.fromkeys( + parameters, getattr(meta, 'is_dynamic', False) + ) + #: calculations + self.calcs = {} + for k, v in parameters.iteritems(): + self.calcs[k] = { + key: v[key] for key in ('formula', 'args', 'returns') + } + keys = ('dependencies', 'always_calc', 'frequency', 'calculator', + 'is_dynamic') + for key in keys: + value = v.get(key) + if value is not None: + getattr(self, key)[k] = value diff --git a/carousel/core/calculators.py b/carousel/core/calculators.py index d83d6d5..2d4b9b2 100644 --- a/carousel/core/calculators.py +++ b/carousel/core/calculators.py @@ -65,14 +65,14 @@ def index_registry(args, reg, ts=None, idx=None): # into data, not necessary for outputs since that will conform to idx rargs = dict.fromkeys(args) # make dictionary from arguments # iterate over arguments - for k, v in args.iteritems(): + for k, v in args.items(): # var ------------------ states ------------------ # idx ===== not None ===== ======= None ======= # isconstant True False None True False None # is_dynamic no yes yes no no no is_dynamic = idx and not reg.isconstant.get(v) # switch based on string type instead of sequence - if isinstance(v, basestring): + if isinstance(v, str): # the default assumes the current index rargs[k] = reg[v][idx] if is_dynamic else reg[v] elif len(v) < 3: @@ -118,7 +118,7 @@ def get_covariance(datargs, 
outargs, vargs, datvar, outvar): argn = len(vargs) # number of observations must be the same for all vargs nobs = 1 - for m in xrange(argn): + for m in range(argn): a = vargs[m] try: a = datargs[a] @@ -127,7 +127,7 @@ def get_covariance(datargs, outargs, vargs, datvar, outvar): avar = outvar[a] else: avar = datvar[a] - for n in xrange(argn): + for n in range(argn): b = vargs[n] try: b = datargs[b] @@ -141,7 +141,7 @@ def get_covariance(datargs, outargs, vargs, datvar, outvar): # covariance matrix is initially zeros cov = np.zeros((nobs, argn, argn)) # loop over arguments in both directions, fill in covariance - for m in xrange(argn): + for m in range(argn): a = vargs[m] try: a = datargs[a] @@ -150,7 +150,7 @@ def get_covariance(datargs, outargs, vargs, datvar, outvar): avar = outvar[a] else: avar = datvar[a] - for n in xrange(argn): + for n in range(argn): b = vargs[n] try: b = datargs[b] @@ -218,18 +218,18 @@ def calculate(cls, calc, formula_reg, data_reg, out_reg, ) # use magnitudes if quantities cov = (np.swapaxes((cov.T * scale), 0, 1) * scale).T nret = len(retval) # number of return output - for m in xrange(nret): + for m in range(nret): a = returns[m] # name in output registry out_reg.variance[a] = {} out_reg.uncertainty[a] = {} out_reg.jacobian[a] = {} - for n in xrange(nret): + for n in range(nret): b = returns[n] out_reg.variance[a][b] = cov[:, m, n] if a == b: unc = np.sqrt(cov[:, m, n]) * 100 * UREG.percent out_reg.uncertainty[a][b] = unc - for n in xrange(len(vargs)): + for n in range(len(vargs)): b = vargs[n] try: b = datargs[b] @@ -246,7 +246,7 @@ def calculate(cls, calc, formula_reg, data_reg, out_reg, if len(returns) > 1: # more than one return, zip them up if idx is None: - out_reg.update(zip(returns, retval)) + out_reg.update(list(zip(returns, retval))) else: for k, v in zip(returns, retval): out_reg[k][idx] = v diff --git a/carousel/core/calculators.py.bak b/carousel/core/calculators.py.bak new file mode 100644 index 0000000..d83d6d5 --- /dev/null +++ b/carousel/core/calculators.py.bak @@ -0,0 +1,258 @@ +""" +Calculators are used to execute calculations. +""" + +from carousel.core import logging, UREG +import numpy as np + +LOGGER = logging.getLogger(__name__) + + +def index_registry(args, reg, ts=None, idx=None): + """ + Index into a :class:`~carousel.core.Registry` to return arguments + from :class:`~carousel.core.data_sources.DataRegistry` and + :class:`~carousel.core.outputs.OutputRegistry` based on the + calculation parameter file. + + :param args: Arguments field from the calculation parameter file. + :param reg: Registry in which to index to get the arguments. + :type reg: :class:`~carousel.core.data_sources.DataRegistry`, + :class:`~carousel.core.outputs.OutputRegistry` + :param ts: Time step [units of time]. + :param idx: [None] Index of current time step for dynamic calculations. + + Required arguments for static and dynamic calculations are specified in the + calculation parameter file by the "args" key. Arguments can be from + either the data registry or the outputs registry, which is denoted by the + "data" and "outputs" keys. Each argument is a dictionary whose key is the + name of the argument in the formula specified and whose value can be one of + the following: + + * The name of the argument in the registry :: + + {"args": {"outputs": {"T_bypass": "T_bypass_diode"}}} + + maps the formula argument "T_bypass" to the outputs registry item + "T_bypass_diode". 
+ + * A list with the name of the argument in the registry as the first element + and a negative integer denoting the index relative to the current + timestep as the second element :: + + {"args": {"data": {"T_cell": ["Tcell", -1]}}} + + indexes the previous timestep of "Tcell" from the data registry. + + * A list with the name of the argument in the registry as the first element + and a list of positive integers denoting the index into the item from the + registry as the second element :: + + {"args": {"data": {"cov": ["bypass_diode_covariance", [2]]}}} + + indexes the third element of "bypass_diode_covariance". + + * A list with the name of the argument in the registry as the first + element, a negative real number denoting the time relative to the current + timestep as the second element, and the units of the time as the third :: + + {"args": {"data": {"T_cell": ["Tcell", -1, 'day']}}} + + indexes the entire previous day of "Tcell". + """ + # TODO: move this to new Registry method or __getitem__ + # TODO: replace idx with datetime object and use timeseries to interpolate + # into data, not necessary for outputs since that will conform to idx + rargs = dict.fromkeys(args) # make dictionary from arguments + # iterate over arguments + for k, v in args.iteritems(): + # var ------------------ states ------------------ + # idx ===== not None ===== ======= None ======= + # isconstant True False None True False None + # is_dynamic no yes yes no no no + is_dynamic = idx and not reg.isconstant.get(v) + # switch based on string type instead of sequence + if isinstance(v, basestring): + # the default assumes the current index + rargs[k] = reg[v][idx] if is_dynamic else reg[v] + elif len(v) < 3: + if reg.isconstant[v[0]]: + # only get indices specified by v[1] + # tuples interpreted as a list of indices, see + # NumPy basic indexing: Dealing with variable + # numbers of indices within programs + rargs[k] = reg[v[0]][tuple(v[1])] + elif v[1] < 0: + # specified offset from current index + rargs[k] = reg[v[0]][idx + v[1]] + else: + # get indices specified by v[1] at current index + rargs[k] = reg[v[0]][idx][tuple(v[1])] + else: + # specified timedelta from current index + dt = 1 + (v[1] * UREG(str(v[2])) / ts).item() + # TODO: deal with fractions of timestep + rargs[k] = reg[v[0]][(idx + dt):(idx + 1)] + return rargs + + +class Calculator(object): + """ + Base class for calculators. Must implement ``calculate`` method. + """ + shortname = '' + + @staticmethod + def get_covariance(datargs, outargs, vargs, datvar, outvar): + """ + Get covariance matrix. 
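+
+        The result is assembled from the variance registries into an array of
+        shape ``(nobs, argn, argn)`` -- one matrix per observation -- and
+        squeezed when there is a single observation. A hedged sketch::
+
+            # datvar = {'x': {'x': 0.01}, 'y': {'y': 0.04}}, uncorrelated
+            # -> cov = [[0.01, 0.00],
+            #           [0.00, 0.04]]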
+ + :param datargs: data arguments + :param outargs: output arguments + :param vargs: variable arguments + :param datvar: variance of data arguments + :param outvar: variance of output arguments + :return: covariance + """ + # number of formula arguments that are not constant + argn = len(vargs) + # number of observations must be the same for all vargs + nobs = 1 + for m in xrange(argn): + a = vargs[m] + try: + a = datargs[a] + except (KeyError, TypeError): + a = outargs[a] + avar = outvar[a] + else: + avar = datvar[a] + for n in xrange(argn): + b = vargs[n] + try: + b = datargs[b] + except (KeyError, TypeError): + b = outargs[b] + c = avar.get(b, 0.0) + try: + nobs = max(nobs, len(c)) + except (TypeError, ValueError): + LOGGER.debug('c of %s vs %s = %g', a, b, c) + # covariance matrix is initially zeros + cov = np.zeros((nobs, argn, argn)) + # loop over arguments in both directions, fill in covariance + for m in xrange(argn): + a = vargs[m] + try: + a = datargs[a] + except (KeyError, TypeError): + a = outargs[a] + avar = outvar[a] + else: + avar = datvar[a] + for n in xrange(argn): + b = vargs[n] + try: + b = datargs[b] + except (KeyError, TypeError): + b = outargs[b] + cov[:, m, n] = avar.get(b, 0.0) + if nobs == 1: + cov = cov.squeeze() # squeeze out any extra dimensions + LOGGER.debug('covariance:\n%r', cov) + return cov + + @classmethod + def calculate(cls, calc, formula_reg, data_reg, out_reg, + timestep=None, idx=None): + """ + Execute calculation + + :param calc: calculation, with formula, args and return keys + :type calc: dict + :param formula_reg: Registry of formulas. + :type formula_reg: :class:`~carousel.core.FormulaRegistry` + :param data_reg: Data registry. + :type data_reg: :class:`~carousel.core.data_sources.DataRegistry` + :param out_reg: Outputs registry. 
+ :type out_reg: :class:`~carousel.core.outputs.OutputRegistry` + :param timestep: simulation interval length [time], default is ``None`` + :param idx: interval index, default is ``None`` + :type idx: int + """ + # get the formula-key from each static calc + formula = calc['formula'] # name of formula in calculation + func = formula_reg[formula] # formula function object + fargs = formula_reg.args.get(formula, []) # formula arguments + constants = formula_reg.isconstant.get(formula) # constant args + # formula arguments that are not constant + vargs = [] if constants is None else [a for a in fargs if a not in constants] + args = calc['args'] # calculation arguments + # separate data and output arguments + datargs, outargs = args.get('data', {}), args.get('outputs', {}) + data = index_registry(datargs, data_reg, timestep, idx) + outputs = index_registry(outargs, out_reg, timestep, idx) + kwargs = dict(data, **outputs) # combined data and output args + args = [kwargs.pop(a) for a in fargs if a in kwargs] + returns = calc['returns'] # return arguments + # if constants is None then the covariance should also be None + # TODO: except other values, eg: "all" to indicate no covariance + if constants is None: + cov = None # do not propagate uncertainty + else: + # get covariance matrix + cov = cls.get_covariance(datargs, outargs, vargs, + data_reg.variance, out_reg.variance) + # update kwargs with covariance if it exists + kwargs['__covariance__'] = cov + retval = func(*args, **kwargs) # calculate function + # update output registry with covariance and jacobian + if cov is not None: + # split uncertainty and jacobian from return values + cov, jac = retval[-2:] + retval = retval[:-2] + # scale covariance + scale = np.asarray( + [1 / r.m if isinstance(r, UREG.Quantity) else 1 / r + for r in retval] + ) # use magnitudes if quantities + cov = (np.swapaxes((cov.T * scale), 0, 1) * scale).T + nret = len(retval) # number of return output + for m in xrange(nret): + a = returns[m] # name in output registry + out_reg.variance[a] = {} + out_reg.uncertainty[a] = {} + out_reg.jacobian[a] = {} + for n in xrange(nret): + b = returns[n] + out_reg.variance[a][b] = cov[:, m, n] + if a == b: + unc = np.sqrt(cov[:, m, n]) * 100 * UREG.percent + out_reg.uncertainty[a][b] = unc + for n in xrange(len(vargs)): + b = vargs[n] + try: + b = datargs[b] + except (KeyError, TypeError): + b = outargs[b] + out_reg.jacobian[a][b] = jac[:, m, n] + LOGGER.debug('%s cov:\n%r', a, out_reg.variance[a]) + LOGGER.debug('%s jac:\n%r', a, out_reg.jacobian[a]) + LOGGER.debug('%s unc:\n%r', a, out_reg.uncertainty[a]) + # if there's only one return value, squeeze out extra dimensions + if len(retval) == 1: + retval = retval[0] + # put return values into output registry + if len(returns) > 1: + # more than one return, zip them up + if idx is None: + out_reg.update(zip(returns, retval)) + else: + for k, v in zip(returns, retval): + out_reg[k][idx] = v + else: + # only one return, get it by index at 0 + if idx is None: + out_reg[returns[0]] = retval + else: + out_reg[returns[0]][idx] = retval diff --git a/carousel/core/data_readers.py b/carousel/core/data_readers.py index 378fffb..0033a44 100644 --- a/carousel/core/data_readers.py +++ b/carousel/core/data_readers.py @@ -5,7 +5,7 @@ which are used to read in data sources. 
""" -from StringIO import StringIO +from io import StringIO from carousel.core import UREG, Q_ from carousel.core.exceptions import ( UnnamedDataError, MixedTextNoMatchError @@ -170,7 +170,7 @@ def apply_units_to_cache(self, data): :return: data with units applied :rtype: :class:`~pint.unit.Quantity` """ - for k, val in self.parameters.iteritems(): + for k, val in self.parameters.items(): if 'units' in val: data[k] = Q_(data[k], val.get('units')) return data @@ -254,7 +254,7 @@ def load_data(self, filename, *args, **kwargs): data = {} # an empty dictionary to store data # iterate through sheets in parameters # iterate through the parameters on each sheet - for param, pval in self.parameters.iteritems(): + for param, pval in self.parameters.items(): sheet = pval['extras']['sheet'] # get each worksheet from the workbook worksheet = workbook.sheet_by_name(sheet) @@ -280,7 +280,7 @@ def load_data(self, filename, *args, **kwargs): # if both elements are `list` then parameter is 2-D else: datum = [] - for col in xrange(prng0[1], prng1[1]): + for col in range(prng0[1], prng1[1]): datum.append(worksheet.col_values(col, prng0[0], prng1[0])) # duck typing that datum is real @@ -296,7 +296,7 @@ def load_data(self, filename, *args, **kwargs): # all([]) == True but any([]) == False if not datum: data[param] = None # convert empty to None - elif all(isinstance(_, basestring) for _ in datum): + elif all(isinstance(_, str) for _ in datum): data[param] = datum # all str is OK (EG all 'TMY') elif all(not _ for _ in datum): data[param] = None # convert list of empty to None @@ -319,7 +319,7 @@ def apply_units_to_cache(self, data): """ # iterate through sheets in parameters # iterate through the parameters on each sheet - for param, pval in self.parameters.iteritems(): + for param, pval in self.parameters.items(): # try to apply units try: data[param] *= UREG(str(pval.get('units') or '')) @@ -521,14 +521,14 @@ def _apply_units_to_numpy_data_readers(parameters, data): # dictionary of header field parameters header_fields = {field[0]: field[1:] for field in fields} # loop over fieldnames - for k, val in header_fields.iteritems(): + for k, val in header_fields.items(): # check for units in header field parameters if len(val) > 1: data[k] *= UREG(str(val[1])) # apply units # apply other data units data_units = parameters['data'].get('units') # default is None if data_units: - for k, val in data_units.iteritems(): + for k, val in data_units.items(): data[k] *= UREG(str(val)) # apply units return data @@ -577,16 +577,16 @@ def _read_header(f, header_param): header_reader = csv.DictReader(header_str, header_names, delimiter=header_delim, skipinitialspace=True) - data = header_reader.next() # parse the header dictionary + data = next(header_reader) # parse the header dictionary # iterate over items in data - for k, v in data.iteritems(): + for k, v in data.items(): header_type = header_fields[k][0] # spec'd type # whitelist header types - if isinstance(header_type, basestring): + if isinstance(header_type, str): if header_type.lower().startswith('int'): header_type = int # coerce to integer elif header_type.lower().startswith('long'): - header_type = long # coerce to long integer + header_type = int # coerce to long integer elif header_type.lower().startswith('float'): header_type = float # to floating decimal point elif header_type.lower().startswith('str'): @@ -666,7 +666,7 @@ def __init__(self, parameters): parameter_sheets = self.parameterization['parameter']['sheets'] for n, sheet in enumerate(parameter_sheets): 
new_parameters[sheet] = {} # empty dictionary for sheet data - for k, v in self.parameterization['data'].iteritems(): + for k, v in self.parameterization['data'].items(): new_parameters[sheet][k + '_' + str(n)] = v super(ParameterizedXLS, self).__init__(new_parameters) # filename is instance attribute of XLRDReader @@ -688,7 +688,7 @@ def load_data(self, filename, *args, **kwargs): for key in self.parameterization['data']: units = str(self.parameterization['data'][key].get('units')) or '' datalist = [] - for n in xrange(num_sheets): + for n in range(num_sheets): k = key + '_' + str(n) datalist.append(data[k].reshape((1, -1))) data.pop(k) # remove unused data keys @@ -778,9 +778,9 @@ def load_data(self, filename, *args, **kwargs): # load text data data = super(MixedTextXLS, self).load_data(filename) # iterate through sheets in parameters - for sheet_params in self.parameters.itervalues(): + for sheet_params in self.parameters.values(): # iterate through the parameters on each sheet - for param, pval in sheet_params.iteritems(): + for param, pval in sheet_params.items(): pattern = pval.get('pattern', EFG_PATTERN) # get pattern re_meth = pval.get('method', 'search') # get re method # whitelist re methods, getattr could be considered harmful diff --git a/carousel/core/data_readers.py.bak b/carousel/core/data_readers.py.bak new file mode 100644 index 0000000..378fffb --- /dev/null +++ b/carousel/core/data_readers.py.bak @@ -0,0 +1,805 @@ +# -*- coding: utf-8 -*- +""" +This module provides the base classes for data readers, such as +`XLRD `_ and :func:`numpy.loadtxt`, +which are used to read in data sources. +""" + +from StringIO import StringIO +from carousel.core import UREG, Q_ +from carousel.core.exceptions import ( + UnnamedDataError, MixedTextNoMatchError +) +from xlrd import open_workbook +import csv +import numpy as np +import json +import os +import time +import re + +# regex pattern for %e, %E, %f and %g +# http://docs.python.org/2/library/re.html#simulating-scanf +# use (?...) for non capturing groups +EFG_PATTERN = '([-+]?(?:\\d+(?:\\.\\d*)?|\\.\\d+)(?:[eE][-+]?\\d+)?)' +# whitelist regex methods +RE_METH = ['search', 'match', 'findall', 'split'] + + +class DataReader(object): + """ + Required interface for all Carousel data readers. + + :param parameters: parameters to be read + :type parameters: dict + """ + #: True if reader accepts ``filename`` argument + is_file_reader = True # overload in subclasses + + def __init__(self, parameters, meta=None): + #: parameters to be read by reader + self.parameters = parameters + #: meta if any + self.meta = meta + + def load_data(self, *args, **kwargs): + """ + Load data from source using reader. This method must be implemented by + each data reader. + + :param args: positional arguments + :param kwargs: keyword arguments + :returns: data read by :class:`DataReader` + :rtype: dict + :raises: :exc:`~exceptions.NotImplementedError` + """ + raise NotImplementedError('load_data') + + def apply_units_to_cache(self, data): + """ + Apply units to cached data. This method must be implemented by each data + reader. + + :param data: cached data + :return: data with units applied + :rtype: :class:`~pint.unit.Quantity` + :raises: :exc:`~exceptions.NotImplementedError` + """ + raise NotImplementedError('apply_units_to_cache') + + +class JSONReader(DataReader): + """ + Read data from a JSON file. 
+
+    :param parameters: parameters to read
+    :type parameters: dict
+    :param data_reader: original :class:`DataReader` if data cached as JSON
+
+    This is the default data reader if not specified in the data source. The
+    format of the data is similar to the dictionary used to create the data
+    registry, except without units.
+
+    For example::
+
+        {
+            "data": {
+                "DNI": [834, 523, 334, 34, 0, 0],
+                "zenith": [21, 28, 45, 79, 90, 90]
+            },
+            "param_file": "path/to/corresponding/param_file.json",
+            "data_source": "MyDataSource"
+        }
+
+    Parameters can be specified in a JSON file. ::
+
+        {
+            "DNI": {
+                "description": "direct normal insolation",
+                "units": "W/m**2",
+                "isconstant": false
+            },
+            "zenith": {
+                "description": "solar zenith",
+                "units": "degrees",
+                "isconstant": false
+            }
+        }
+
+    Parameters can also be specified in the data source as class attributes. ::
+
+        class MyDataSrc(DataSource):
+            data_reader = JSONReader
+            DNI = {
+                "description": "direct normal insolation",
+                "units": "W/m**2",
+                "isconstant": False
+            }
+            zenith = {
+                "description": "solar zenith",
+                "units": "degrees",
+                "isconstant": False
+            }
+
+    """
+    def __init__(self, parameters, meta=None):
+        super(JSONReader, self).__init__(parameters, meta)
+        #: original data reader [None]
+        self.orig_data_reader = meta.data_reader
+
+    def load_data(self, filename, *args, **kwargs):
+        """
+        Load JSON data.
+
+        :param filename: name of JSON file with data
+        :type filename: str
+        :return: data
+        :rtype: dict
+        """
+        # append .json extension if needed
+        if not filename.endswith('.json'):
+            filename += '.json'  # append "json" to filename
+        # open file and load JSON data
+        with open(filename, 'r') as fid:
+            json_data = json.load(fid)
+        # if JSONReader is the original reader then apply units and return
+        if (not self.orig_data_reader or
+                isinstance(self, self.orig_data_reader)):
+            return self.apply_units_to_cache(json_data['data'])
+        # UTC modification time of the original file when JSON file was saved
+        utc_mod_time = json_data.get('utc_mod_time')
+        # instance of original data reader with original parameters
+        orig_data_reader_obj = self.orig_data_reader(self.parameters, self.meta)
+        # check if file has been modified since saved as JSON file
+        if utc_mod_time:
+            # convert to ordered tuple
+            utc_mod_time = time.struct_time(utc_mod_time)
+            orig_filename = filename[:-5]  # original filename
+            # use original file if it's been modified since JSON file saved
+            if utc_mod_time < time.gmtime(os.path.getmtime(orig_filename)):
+                os.remove(filename)  # delete JSON file
+                return orig_data_reader_obj.load_data(orig_filename)
+        # use JSON file if original file hasn't been modified
+        return orig_data_reader_obj.apply_units_to_cache(json_data['data'])
+
+    def apply_units_to_cache(self, data):
+        """
+        Apply units to data read using :class:`JSONReader`.
+
+        :param data: cached data
+        :return: data with units applied
+        :rtype: :class:`~pint.unit.Quantity`
+        """
+        for k, val in self.parameters.iteritems():
+            if 'units' in val:
+                data[k] = Q_(data[k], val.get('units'))
+        return data
+
+
+class XLRDReader(DataReader):
+    """
+    Read data using XLRD.
+
+    The :attr:`~DataReader.parameters` argument is a dictionary. Each item is a
+    dictionary that contains the following keys: "description", "units",
+    "range" and "sheet". The parameters argument should be the parameters
+    argument created by a :class:`~carousel.core.data_sources.DataSource` and
+    the values of each item should be a
+    :class:`~carousel.core.data_sources.DataParameter`.
+    Therefore any non-parameter attributes like "range" and "sheet" would be
+    in "extras".
+
+    If the range is a ...
+
+    * single cell -- use [rowx, colx].
+    * 2-D range -- use 2 arrays, [start, stop], each with [rowx, colx].
+    * column slice -- use an array and an int, [slice, colx], in which slice is
+      [start-rowx, stop-rowx]. Set stop-rowx to ``None`` to read the rest of
+      the column after start-rowx.
+    * row slice -- use [rowx, slice] in which slice is [start-colx, stop-colx].
+      Set stop-colx to ``None`` to read the rest of the row after start-colx.
+    * column -- use [None, colx] or [[], colx]
+    * row -- use [rowx, None] or [rowx, []]
+
+    .. seealso::
+        The xlrd Module
+
+    Example of :attr:`~DataReader.parameters`::
+
+        parameters = {
+            "month": {
+                "description": "month of year",
+                "units": "month",
+                "range": [[2, 8762], 2],
+                "sheet": "Level 1 Outputs"
+            },
+            "day": {
+                "description": "day of month",
+                "units": "day",
+                "range": [[2, 8762], 3],
+                "sheet": "Level 1 Outputs"
+            },
+            "PAC": {
+                "description": "AC power",
+                "units": "kW",
+                "range": [[2, 8762], 12],
+                "sheet": "Level 2 Outputs"
+            },
+            "PDC": {
+                "description": "DC power",
+                "units": "kW",
+                "range": [[2, 8762], 13],
+                "sheet": "Level 2 Outputs"
+            }
+        }
+
+    This loads "month" and "day" data from columns 2 and 3 in the "Level 1
+    Outputs" sheet and "PAC" and "PDC" data from columns 12 and 13 in the
+    "Level 2 Outputs" sheet. The units and a description for each data set are
+    also given. Each of the data columns is 8760 rows long, from row 2 to row
+    8762. Don't forget that indexing starts at 0, so row 2 is the 3rd row.
+    """
+
+    def load_data(self, filename, *args, **kwargs):
+        """
+        Load parameters from Excel spreadsheet.
+
+        :param filename: Name of Excel workbook with data.
+        :type filename: str
+        :returns: Data read from Excel workbook.
+        :rtype: dict
+        """
+        # workbook read from file
+        workbook = open_workbook(filename, verbosity=True)
+        data = {}  # an empty dictionary to store data
+        # iterate through sheets in parameters
+        # iterate through the parameters on each sheet
+        for param, pval in self.parameters.iteritems():
+            sheet = pval['extras']['sheet']
+            # get each worksheet from the workbook
+            worksheet = workbook.sheet_by_name(sheet)
+            # split the parameter's range elements
+            prng0, prng1 = pval['extras']['range']
+            # missing "units", json ``null`` and Python ``None`` all OK!
+            # convert to str from unicode, None to '' (dimensionless)
+            punits = str(pval.get('units') or '')
+            # replace None with empty list
+            if prng0 is None:
+                prng0 = []
+            if prng1 is None:
+                prng1 = []
+            # FIXME: Use duck-typing here instead of type-checking!
+            # if both elements in range are `int` then parameter is a cell
+            if isinstance(prng0, int) and isinstance(prng1, int):
+                datum = worksheet.cell_value(prng0, prng1)
+            # if either element is a `list` then parameter is a slice
+            elif isinstance(prng0, list) and isinstance(prng1, int):
+                datum = worksheet.col_values(prng1, *prng0)
+            elif isinstance(prng0, int) and isinstance(prng1, list):
+                datum = worksheet.row_values(prng0, *prng1)
+            # if both elements are `list` then parameter is 2-D
+            else:
+                datum = []
+                for col in xrange(prng0[1], prng1[1]):
+                    datum.append(worksheet.col_values(col, prng0[0],
+                                                      prng1[0]))
+            # duck typing that datum is real
+            try:
+                npdatum = np.array(datum, dtype=np.float)
+            except ValueError as err:
+                # check for iterable:
+                # if `datum` can't be coerced to float, then it must be
+                # *string* & strings *are* iterables, so don't check!
+ # check for strings: + # data must be real or *all* strings! + # empty string, None or JSON null also OK + # all([]) == True but any([]) == False + if not datum: + data[param] = None # convert empty to None + elif all(isinstance(_, basestring) for _ in datum): + data[param] = datum # all str is OK (EG all 'TMY') + elif all(not _ for _ in datum): + data[param] = None # convert list of empty to None + else: + raise err # raise ValueError if not all real or str + else: + data[param] = npdatum * UREG(punits) + # FYI: only put one statement into try-except test otherwise + # might catch different error than expected. use ``else`` as + # option to execute only if exception *not* raised. + return data + + def apply_units_to_cache(self, data): + """ + Apply units to cached data read using :class:`JSONReader`. + + :param data: Cached data. + :type data: dict + :return: data with units + """ + # iterate through sheets in parameters + # iterate through the parameters on each sheet + for param, pval in self.parameters.iteritems(): + # try to apply units + try: + data[param] *= UREG(str(pval.get('units') or '')) + except TypeError: + continue + return data + + +class NumPyLoadTxtReader(DataReader): + """ + Read data using :func:`numpy.loadtxt` function. + + The :attr:`~DataReader.parameters` argument is a dictionary that must have + a "data" key. An additional "header" is optional; see :func:`_read_header`. + + The "data" key provides arguments to :func:`numpy.loadtxt`. The "dtype" key + must be specified, as names are required for all data in Carousel. Some + of the other :func:`numpy.loadtxt` arguments: "delimiter" and "skiprows" can + also be specified as keys. In addition "units" can also be specified in a + dictionary in which the keys are the names of the data output by + :func:`numpy.loadtxt`. Converters are not permitted. The "usecols" + argument is also not used since :func:`numpy.loadtxt` states that "the + number of columns used must match the number of fields in the data-type" + and "dtype" is already specified. The other arguments, "fname", "comments", + "unpack" and "ndmin" are also not used. + + Example of :attr:`~DataReader.parameters`:: + + parameters = { + 'header': { + 'delimiter': ',', + 'fields': [ + ['Name', 'str'], + ['Latitude', 'float', 'arcdegree'], + ['Longitude', 'float', 'arcdegree']]}, + 'data': { + 'dtype': [ + ['Date', '(3,)int'], ['Time', '(2,)int'], + ['GHI', 'float'], ['DNI', 'float'], ['DHI', 'float']], + 'units': { + 'GHI': 'W/m**2', 'DNI': 'W/m**2', 'DHI': 'W/m**2'}, + 'usecols': [0, 1, 4, 7, 10]}} + + This loads a header with 3 fields followed by 5 columns of data, converting + the 1st column, "Date", to a 3-element tuple of ``int`` and the 2nd column, + "Time", to a 2-element tuple of ``int``. + """ + + def load_data(self, filename, *args, **kwargs): + """ + load data from text file. 
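+
+        A hedged usage sketch, with ``parameters`` as in the class docstring
+        and a hypothetical file name::
+
+            reader = NumPyLoadTxtReader(parameters)
+            data = reader.load_data('weather.txt')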
+
+        :param filename: name of text file to read
+        :type filename: str
+        :returns: data read from file using :func:`numpy.loadtxt`
+        :rtype: dict
+        """
+        # header keys
+        header_param = self.parameters.get('header')  # default is None
+        # data keys
+        data_param = self.parameters['data']  # raises KeyError if no 'data'
+        dtype = data_param['dtype']  # raises KeyError if no 'dtype'
+        # convert to tuple and normal ASCII
+        _utf8_list_to_ascii_tuple(dtype) if dtype else None  # -> tuple of str
+        delimiter = data_param.get('delimiter')  # default is None
+        skiprows = data_param.get('skiprows')  # default is None
+        data_units = data_param.get('units', {})  # default is an empty dict
+        data = {}  # a dictionary for data
+        # open file for reading
+        with open(filename, 'r') as fid:
+            # read header
+            if header_param:
+                data.update(_read_header(fid, header_param))
+                fid.seek(0)  # move cursor back to beginning
+            # read data
+            data_data = np.loadtxt(fid, dtype, delimiter=delimiter,
+                                   skiprows=skiprows)
+        # apply units
+        data.update(_apply_units(data_data, data_units, fid.name))
+        return data
+
+    def apply_units_to_cache(self, data):
+        """
+        Apply units to data originally loaded by :class:`NumPyLoadTxtReader`.
+        """
+        return _apply_units_to_numpy_data_readers(self.parameters, data)
+
+
+class NumPyGenFromTxtReader(DataReader):
+    """
+    Read data using :func:`numpy.genfromtxt` function.
+
+    The :attr:`~DataReader.parameters` argument is a dictionary that must have
+    a "data" key. An additional "header" is optional; see :func:`_read_header`.
+
+    The "data" key provides arguments to :func:`numpy.genfromtxt`. Either the
+    "dtype" or "names" key must be specified, as names are required for all
+    data in Carousel. Some other :func:`numpy.genfromtxt` arguments,
+    "delimiter", "skip_header", "usecols", "excludelist" and "deletechars",
+    can also be specified as keys. In addition "units" can also be specified
+    in a dictionary in which the keys are the names of the data output by
+    :func:`numpy.genfromtxt`. Converters are not permitted. The other
+    arguments, "fname", "comments", "skip_footer", "missing_values",
+    "filling_values", "defaultfmt", "autostrip", "replace_space",
+    "case_sensitive", "unpack", "usemask" and "invalid_raise", are also not
+    used.
+
+    If the data names are not specified in the "dtype" key or "names" key,
+    then :meth:`~NumPyGenFromTxtReader.load_data` will raise an exception,
+    :exc:`~carousel.core.exceptions.UnnamedDataError`.
+
+    .. seealso::
+        `Importing data with genfromtxt \
+        `_
+
+    Example of :attr:`~DataReader.parameters`::
+
+        parameters = {
+            'header': {
+                'delimiter': ' ',
+                'fields': [
+                    ['city', 'str'], ['state', 'str'],
+                    ["timezone", 'int'], ["elevation", 'int', 'meters']]},
+            'data': {
+                'delimiter': 4,
+                'names': ['DNI', 'DHI', 'GHI'],
+                'units': {'DNI': 'W/m**2', 'DHI': 'W/m**2', 'GHI': 'W/m**2'}}}
+
+    This loads a header that is delimited by whitespace, followed by data in
+    three fixed-width columns, each 4 characters wide, read as floats.
+    """
+
+    def load_data(self, filename, *args, **kwargs):
+        """
+        load data from text file.
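+
+        Either "dtype" or "names" must appear in the "data" parameters;
+        otherwise :exc:`~carousel.core.exceptions.UnnamedDataError` is
+        raised before the file is read.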
+
+        :param filename: name of file to read
+        :type filename: str
+        :returns: data read from file using :func:`numpy.genfromtxt`
+        :rtype: dict
+        :raises: :exc:`~carousel.core.exceptions.UnnamedDataError`
+        """
+        # header keys
+        header_param = self.parameters.get('header')  # default is None
+        # data keys
+        data_param = self.parameters['data']  # raises KeyError if no 'data'
+        dtype = data_param.get('dtype')  # default is None
+        # if not None convert to tuple and normal ASCII
+        _utf8_list_to_ascii_tuple(dtype) if dtype else None  # -> tuple of str
+        delimiter = data_param.get('delimiter')  # default is None
+        skip_header = data_param.get('skip_header')  # default is None
+        usecols = data_param.get('usecols')  # default is None
+        names = data_param.get('names')  # default is None
+        names = [str(_) for _ in names] if names else None  # -> str
+        excludelist = data_param.get('excludelist')  # default is None
+        deletechars = data_param.get('deletechars')  # default is None
+        data_units = data_param.get('units', {})  # default is an empty dict
+        # either dtype or names must be specified
+        if not (dtype or names):
+            raise UnnamedDataError(filename)
+        data = {}  # a dictionary for data
+        # open file for reading
+        with open(filename, 'r') as fid:
+            # read header
+            if header_param:
+                data.update(_read_header(fid, header_param))
+                fid.seek(0)  # move cursor back to beginning
+            # data
+            data_data = np.genfromtxt(fid, dtype, delimiter=delimiter,
+                                      skip_header=skip_header, usecols=usecols,
+                                      names=names, excludelist=excludelist,
+                                      deletechars=deletechars)
+        # apply units
+        data.update(_apply_units(data_data, data_units, fid.name))
+        return data
+
+    def apply_units_to_cache(self, data):
+        """
+        Apply units to data originally loaded by
+        :class:`NumPyGenFromTxtReader`.
+        """
+        return _apply_units_to_numpy_data_readers(self.parameters, data)
+
+
+def _apply_units_to_numpy_data_readers(parameters, data):
+    """
+    Apply units to data originally loaded by :class:`NumPyLoadTxtReader` or
+    :class:`NumPyGenFromTxtReader`.
+
+    :param parameters: Dictionary of data source parameters read from JSON
+        file.
+    :type parameters: dict
+    :param data: Dictionary of data read.
+    """
+    # apply header units
+    header_param = parameters.get('header')  # default is None
+    # check for headers
+    if header_param:
+        fields = header_param['fields']  # header fields
+        # dictionary of header field parameters
+        header_fields = {field[0]: field[1:] for field in fields}
+        # loop over fieldnames
+        for k, val in header_fields.iteritems():
+            # check for units in header field parameters
+            if len(val) > 1:
+                data[k] *= UREG(str(val[1]))  # apply units
+    # apply other data units
+    data_units = parameters['data'].get('units')  # default is None
+    if data_units:
+        for k, val in data_units.iteritems():
+            data[k] *= UREG(str(val))  # apply units
+    return data
+
+
+def _read_header(f, header_param):
+    """
+    Read and parse data from 1st line of a file.
+
+    :param f: :func:`file` or :class:`~StringIO.StringIO` object from which to
+        read 1st line.
+    :type f: file
+    :param header_param: Parameters used to parse the data from the header.
+        Contains "delimiter" and "fields".
+    :type header_param: dict
+    :returns: Dictionary of data read from header.
+    :rtype: dict
+    :raises: :exc:`~carousel.core.exceptions.UnnamedDataError`
+
+    The **header_param** argument contains keys to read the 1st line of **f**.
+    If "delimiter" is ``None`` or missing, the default delimiter is a comma,
+    otherwise "delimiter" can be any single character, integer or sequence of
+    ``int``.
+
+    * single character -- a delimiter
+    * single integer -- uniform fixed width
+    * sequence of ``int`` -- fixed widths, the number of fields should \
+        correspond to the length of the sequence.
+
+    The "fields" key is a list of (parameter-name, parameter-type[, parameter-
+    units]) lists.
+    """
+    # default delimiter is a comma, can't be None
+    header_delim = str(header_param.get('delimiter', ','))
+    # don't allow unnamed fields
+    if 'fields' not in header_param:
+        raise UnnamedDataError(f.name)
+    header_fields = {field[0]: field[1:] for field in header_param['fields']}
+    # header_names can't be generator b/c DictReader needs list, and can't be
+    # dictionary b/c must be same order as 'fields' to match data read by csv
+    header_names = [field[0] for field in header_param['fields']]
+    # read header
+    header_str = StringIO(f.readline())  # read the 1st line
+    # use csv because it will preserve quoted fields with commas
+    # make a csv.DictReader from header string, use header names for
+    # fieldnames and set delimiter to header delimiter
+    header_reader = csv.DictReader(header_str, header_names,
+                                   delimiter=header_delim,
+                                   skipinitialspace=True)
+    data = header_reader.next()  # parse the header dictionary
+    # iterate over items in data
+    for k, v in data.iteritems():
+        header_type = header_fields[k][0]  # spec'd type
+        # whitelist header types
+        if isinstance(header_type, basestring):
+            if header_type.lower().startswith('int'):
+                header_type = int  # coerce to integer
+            elif header_type.lower().startswith('long'):
+                header_type = long  # coerce to long integer
+            elif header_type.lower().startswith('float'):
+                header_type = float  # to floating decimal point
+            elif header_type.lower().startswith('str'):
+                header_type = str  # coerce to string
+            elif header_type.lower().startswith('bool'):
+                header_type = bool  # coerce to boolean
+            else:
+                raise TypeError('"%s" is not a supported type.' % header_type)
+            # NOTE: `eval` is deliberately not used here: `header_type` is
+            # read from a JSON file, not secure input, and could be used to
+            # exploit the system
+        data[k] = header_type(v)  # cast v to type
+        # check for units in 3rd element
+        if len(header_fields[k]) > 1:
+            units = UREG(str(header_fields[k][1]))  # spec'd units
+            data[k] = data[k] * units  # apply units
+    return data
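+
+
+# Editorial sketch (not part of the original module): a minimal use of
+# ``_read_header`` with an in-memory file; the names and values below are
+# made up for illustration.
+#
+#     header_param = {'fields': [['Name', 'str'],
+#                                ['Latitude', 'float', 'arcdegree']]}
+#     _read_header(StringIO('Tucson,32.2\n'), header_param)
+#     # -> {'Name': 'Tucson', 'Latitude': 32.2 * UREG('arcdegree')}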
+
+
+def _apply_units(data_data, data_units, fname):
+    """
+    Apply units to data.
+
+    :param data_data: NumPy structured array with data from fname.
+    :type data_data: :class:`numpy.ndarray`
+    :param data_units: Units of fields in data_data.
+    :type data_units: dict
+    :param fname: Name of file from which data_data was read.
+    :type fname: str
+    :returns: Dictionary of data with units applied.
+    :rtype: dict
+    :raises: :exc:`~carousel.core.exceptions.UnnamedDataError`
+    """
+    data_names = data_data.dtype.names
+    # raise error if NumPy data doesn't have names
+    if not data_names:
+        raise UnnamedDataError(fname)
+    data = dict.fromkeys(data_names)  # dictionary of data read by NumPy
+    # iterate over data read by NumPy
+    for data_name in data_names:
+        if data_name in data_units:
+            # if units specified in parameters, then convert to string
+            units = str(data_units[data_name])
+            data[data_name] = data_data[data_name] * UREG(units)
+        elif np.issubdtype(data_data[data_name].dtype, str):
+            # if no units specified and is string
+            data[data_name] = data_data[data_name].tolist()
+        else:
+            data[data_name] = data_data[data_name]
+    return data
+
+
+def _utf8_list_to_ascii_tuple(utf8_list):
+    """
+    Convert unicode strings in a list of lists to ascii in a list of tuples.
+
+    :param utf8_list: A nested list of unicode strings.
+    :type utf8_list: list
+    """
+    for n, utf8 in enumerate(utf8_list):
+        utf8_list[n][0] = str(utf8[0])
+        utf8_list[n][1] = str(utf8[1])
+        utf8_list[n] = tuple(utf8)
+
+
+class ParameterizedXLS(XLRDReader):
+    """
+    Concatenate data from parameterized sheets.
+
+    :param parameters: Parameterization information.
+
+    All data in parameterized sheets must be vectors of only numbers.
+    """
+    def __init__(self, parameters):
+        #: parameterization information
+        self.parameterization = parameters
+        new_parameters = {}  # empty dict for sheet parameters
+        parameter_sheets = self.parameterization['parameter']['sheets']
+        for n, sheet in enumerate(parameter_sheets):
+            new_parameters[sheet] = {}  # empty dictionary for sheet data
+            for k, v in self.parameterization['data'].iteritems():
+                new_parameters[sheet][k + '_' + str(n)] = v
+        super(ParameterizedXLS, self).__init__(new_parameters)
+        # filename is instance attribute of XLRDReader
+
+    def load_data(self, filename, *args, **kwargs):
+        """
+        Load parameterized data from different sheets.
+        """
+        # load parameterized data
+        data = super(ParameterizedXLS, self).load_data(filename)
+        # add parameter to data
+        parameter_name = self.parameterization['parameter']['name']
+        parameter_values = self.parameterization['parameter']['values']
+        parameter_units = str(self.parameterization['parameter']['units'])
+        data[parameter_name] = parameter_values * UREG(parameter_units)
+        # number of sheets
+        num_sheets = len(self.parameterization['parameter']['sheets'])
+        # parse and concatenate parameterized data
+        for key in self.parameterization['data']:
+            units = str(self.parameterization['data'][key].get('units') or '')
+            datalist = []
+            for n in xrange(num_sheets):
+                k = key + '_' + str(n)
+                datalist.append(data[k].reshape((1, -1)))
+                data.pop(k)  # remove unused data keys
+            data[key] = np.concatenate(datalist, axis=0) * UREG(units)
+        return data
+
+    def apply_units_to_cache(self, data):
+        """
+        Apply units to :class:`ParameterizedXLS` data reader.
+        """
+        # parameter
+        parameter_name = self.parameters['parameter']['name']
+        parameter_units = str(self.parameters['parameter']['units'])
+        data[parameter_name] *= UREG(parameter_units)
+        # data
+        self.parameters.pop('parameter')
+        return super(ParameterizedXLS, self).apply_units_to_cache(data)
+
+
+class MixedTextXLS(XLRDReader):
+    """
+    Get parameters from cells mixed with text by matching regex pattern.
+
+    :raises: :exc:`~carousel.core.exceptions.MixedTextNoMatchError`
+
+    Use this reader for spreadsheets that have numerical data mixed with text.
+    It uses the same parameter file as :class:`XLRDReader` with two additional
+    keys: "pattern" and "method". The "pattern" must be a valid regex pattern.
+    Remember to escape backslashes. The "method" must be one of the following
+    regex methods from :mod:`re`:
+
+    * :func:`~re.match`
+    * :func:`~re.search`
+    * :func:`~re.split`
+    * :func:`~re.findall`
+
+    The default method is :func:`re.search` and the default pattern searches
+    for any number represented by the FORTRAN formatters "%e", "%E", "%f" or
+    "%g". This will find one number in any of the formats anywhere in the text
+    of the cell(s) read.
+
+    Example::
+
+        {
+            "Sheet1": {
+                "sigma_bypass_diode": {
+                    "range": [15, 1],
+                    "pattern":
+        "\\w+ = ([-+]?(?:\\d+(?:\\.\\d*)?|\\.\\d+)(?:[eE][-+]?\\d+)?)",
+                    "method": "match"
+                },
+                "B_bypass_diode": {
+                    "range": [16, 1],
+                    "method": "findall"
+                },
+                "C_bypass_diode": {
+                    "range": [17, 1],
+                    "pattern": "\\((\\d+), (\\d+), (\\d+)\\)",
+                    "method": "search"
+                },
+                "cov_bypass_diode": {
+                    "range": [18, 1],
+                    "pattern": "[,;]",
+                    "method": "split"
+                }
+            }
+        }
+
+    These examples all read from "Sheet1". The first example matches one or
+    more alphanumeric characters at the beginning of the string set equal to an
+    integer, decimal or number in scientific notation, such as "Std = 0.4985"
+    from cell B16. The second example finds all numbers matching the default
+    pattern in cell B17. The third example searches for 3 integers in
+    parentheses separated by commas anywhere in cell B18. The last example
+    splits a string delimited by commas and semicolons in cell B19.
+
+    If no match is found then
+    :exc:`~carousel.core.exceptions.MixedTextNoMatchError`
+    is raised. Only numbers can be read, and any singleton dimensions will be
+    squeezed out. For example scalars will become 0-d arrays.
+    """
+
+    def load_data(self, filename, *args, **kwargs):
+        """
+        Load text data from different sheets.
+        """
+        # load text data
+        data = super(MixedTextXLS, self).load_data(filename)
+        # iterate through sheets in parameters
+        for sheet_params in self.parameters.itervalues():
+            # iterate through the parameters on each sheet
+            for param, pval in sheet_params.iteritems():
+                pattern = pval.get('pattern', EFG_PATTERN)  # get pattern
+                re_meth = pval.get('method', 'search')  # get re method
+                # whitelist re methods, getattr could be considered harmful
+                if re_meth in RE_METH:
+                    re_meth = getattr(re, pval.get('method', 'search'))
+                else:
+                    msg = 'Only ' + '"%s", ' * len(RE_METH) % tuple(RE_METH)
+                    msg += 'regex methods are allowed.'
+ raise AttributeError(msg) + # if not isinstance(data[param], basestring): + # re_meth = lambda p, dp: [re_meth(p, d) for d in dp] + match = re_meth(pattern, data[param]) # get matches + if match: + try: + match = match.groups() + except AttributeError: + match = [m.groups() for m in match] + npdata = np.array(match, dtype=float).squeeze() + data[param] = npdata * UREG(str(pval.get('units') or '')) + else: + raise MixedTextNoMatchError(re_meth, pattern, data[param]) + return data diff --git a/carousel/core/data_sources.py b/carousel/core/data_sources.py index bdec134..e47f5cb 100644 --- a/carousel/core/data_sources.py +++ b/carousel/core/data_sources.py @@ -73,23 +73,23 @@ def register(self, newdata, *args, **kwargs): :raises: :exc:`~carousel.core.exceptions.UncertaintyPercentUnitsError` """ - kwargs.update(zip(self.meta_names, args)) + kwargs.update(list(zip(self.meta_names, args))) # check uncertainty has units of percent uncertainty = kwargs['uncertainty'] variance = kwargs['variance'] isconstant = kwargs['isconstant'] # check uncertainty is percent if uncertainty: - for k0, d in uncertainty.iteritems(): - for k1, v01 in d.iteritems(): + for k0, d in uncertainty.items(): + for k1, v01 in d.items(): units = v01.units if units != UREG('percent'): keys = '%s-%s' % (k0, k1) raise UncertaintyPercentUnitsError(keys, units) # check variance is square of uncertainty if variance and uncertainty: - for k0, d in variance.iteritems(): - for k1, v01 in d.iteritems(): + for k0, d in variance.items(): + for k1, v01 in d.items(): keys = '%s-%s' % (k0, k1) missing = k1 not in uncertainty[k0] v2 = np.asarray(uncertainty[k0][k1].to('fraction').m) ** 2.0 @@ -97,7 +97,7 @@ def register(self, newdata, *args, **kwargs): raise UncertaintyVarianceError(keys, v01) # check that isconstant is boolean if isconstant: - for k, v in isconstant.iteritems(): + for k, v in isconstant.items(): if not isinstance(v, bool): classname = self.__class__.__name__ error_msg = ['%s meta "isconstant" should be' % classname, @@ -126,7 +126,7 @@ def __new__(mcs, name, bases, attr): attr = mcs.set_meta(bases, attr) # set default meta attributes meta = attr[mcs._meta_attr] - for ma, dflt in mcs._attr_default.iteritems(): + for ma, dflt in mcs._attr_default.items(): a = getattr(meta, ma, None) if a is None: setattr(meta, ma, dflt) @@ -136,7 +136,7 @@ def __new__(mcs, name, bases, attr): return super(DataSourceBase, mcs).__new__(mcs, name, bases, attr) -class DataSource(object): +class DataSource(object, metaclass=DataSourceBase): """ Required interface for all Carousel data sources such as PVSim results, TMY3 data and calculation input files. @@ -154,7 +154,6 @@ class DataSource(object): This is the required interface for all source files containing data used in Carousel. 
     Carousel.
""" - __metaclass__ = DataSourceBase def __init__(self, *args, **kwargs): # save arguments, might need them later @@ -217,8 +216,8 @@ def __init__(self, *args, **kwargs): self._raw_data = copy(self.data) # shallow copy of data self.__prepare_data__() # prepare data for registry # calculate variances - for k0, d in self.uncertainty.iteritems(): - for k1, v01 in d.iteritems(): + for k0, d in self.uncertainty.items(): + for k1, v01 in d.items(): self.variance[k0] = {k1: v01.to('fraction').m ** 2.0} def __prepare_data__(self): @@ -298,6 +297,6 @@ def __getitem__(self, item): def __repr__(self): parameters = getattr(self, DataSourceBase._param_attr) fmt = ('<%s(' % self.__class__.__name__) - fmt += ', '.join('%s=%r' % (k, v) for k, v in parameters.iteritems()) + fmt += ', '.join('%s=%r' % (k, v) for k, v in parameters.items()) fmt += ')>' return fmt diff --git a/carousel/core/data_sources.py.bak b/carousel/core/data_sources.py.bak new file mode 100644 index 0000000..bdec134 --- /dev/null +++ b/carousel/core/data_sources.py.bak @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +""" +This module provides base classes for data sources. Data sources provide +data to calculations. All data used comes from a data source. The requirements +for data sources are as follows: + +1. Data sources must be sub-classed to :class:`DataSource`. +2. They must know where to get their data, either from a file or from other + data sources. +3. They need a data reader that knows how to extract the data from the file, + or combine data in calculations to produce new data. +4. They require a parameter map that states exactly where the data is and what + its units are, what the data will be called in calculations and any other + meta-data the registry requires. +""" + +from carousel.core import ( + UREG, Registry, CarouselJSONEncoder, CommonBase, Parameter +) +from carousel.core.data_readers import JSONReader +from carousel.core.exceptions import ( + UncertaintyPercentUnitsError, UncertaintyVarianceError +) +import json +import os +import time +from copy import copy +import numpy as np + +DFLT_UNC = 1.0 * UREG('percent') # default uncertainty + + +class DataParameter(Parameter): + """ + Field for data parameters. + """ + _attrs = ['units', 'uncertainty', 'isconstant', 'timeseries'] + + +class DataRegistry(Registry): + """ + A registry for data sources. The meta names are: ``uncertainty``, + ``variance``, ``isconstant``, ``timeseries`` and ``data_source`` + """ + #: meta names + meta_names = ['uncertainty', 'variance', 'isconstant', 'timeseries', + 'data_source'] + + def register(self, newdata, *args, **kwargs): + """ + Register data in registry. Meta for each data is specified by positional + or keyword arguments after the new data and consists of the following: + + * ``uncertainty`` - Map of uncertainties in percent corresponding to new + keys. The uncertainty keys must be a subset of the new data keys. + * ``variance`` - Square of the uncertainty (no units). + * ``isconstant``: Map corresponding to new keys whose values are``True`` + if constant or ``False`` if periodic. These keys must be a subset of + the new data keys. + * ``timeseries``: Name of corresponding time series data, ``None`` if no + time series. _EG_: DNI data ``timeseries`` attribute might be set to a + date/time data that it corresponds to. More than one data can have the + same ``timeseries`` data. + * ``data_source``: the + :class:`~carousel.core.data_sources.DataSource` superclass that + was used to acquire this data. 
This can be used to group data from a + specific source together. + + :param newdata: New data to add to registry. When registering new data, + keys are not allowed to override existing keys in the data + registry. + :type newdata: mapping + :raises: + :exc:`~carousel.core.exceptions.UncertaintyPercentUnitsError` + """ + kwargs.update(zip(self.meta_names, args)) + # check uncertainty has units of percent + uncertainty = kwargs['uncertainty'] + variance = kwargs['variance'] + isconstant = kwargs['isconstant'] + # check uncertainty is percent + if uncertainty: + for k0, d in uncertainty.iteritems(): + for k1, v01 in d.iteritems(): + units = v01.units + if units != UREG('percent'): + keys = '%s-%s' % (k0, k1) + raise UncertaintyPercentUnitsError(keys, units) + # check variance is square of uncertainty + if variance and uncertainty: + for k0, d in variance.iteritems(): + for k1, v01 in d.iteritems(): + keys = '%s-%s' % (k0, k1) + missing = k1 not in uncertainty[k0] + v2 = np.asarray(uncertainty[k0][k1].to('fraction').m) ** 2.0 + if missing or not np.allclose(np.asarray(v01), v2): + raise UncertaintyVarianceError(keys, v01) + # check that isconstant is boolean + if isconstant: + for k, v in isconstant.iteritems(): + if not isinstance(v, bool): + classname = self.__class__.__name__ + error_msg = ['%s meta "isconstant" should be' % classname, + 'boolean, but it was "%s" for "%s".' % (v, k)] + raise TypeError(' '.join(error_msg)) + # call super method, meta must be passed as kwargs! + super(DataRegistry, self).register(newdata, **kwargs) + + +class DataSourceBase(CommonBase): + """ + Base data source meta class. + """ + _path_attr = 'data_path' + _file_attr = 'data_file' + _param_cls = DataParameter + _reader_attr = 'data_reader' + _enable_cache_attr = 'data_cache_enabled' + _attr_default = {_reader_attr: JSONReader, _enable_cache_attr: True} + + def __new__(mcs, name, bases, attr): + # use only with DataSource subclasses + if not CommonBase.get_parents(bases, DataSourceBase): + return super(DataSourceBase, mcs).__new__(mcs, name, bases, attr) + # set _meta combined from bases + attr = mcs.set_meta(bases, attr) + # set default meta attributes + meta = attr[mcs._meta_attr] + for ma, dflt in mcs._attr_default.iteritems(): + a = getattr(meta, ma, None) + if a is None: + setattr(meta, ma, dflt) + # set param file full path if data source path and file specified or + # try to set parameters from class attributes except private/magic + attr = mcs.set_param_file_or_parameters(attr) + return super(DataSourceBase, mcs).__new__(mcs, name, bases, attr) + + +class DataSource(object): + """ + Required interface for all Carousel data sources such as PVSim results, + TMY3 data and calculation input files. + + Each data source must specify a ``data_reader`` which must subclass + :class:`~carousel.core.data_readers.DataReader` and that can read this + data source. The default is + :class:`~carousel.core.data_readers.JSONReader`. + + Each data source must also specify a ``data_file`` and ``data_path`` that + contains the parameters required to import data from the data source using + the data reader. Each data reader had different parameters to specify how + it reads the data source, so consult the API. + + This is the required interface for all source files containing data used in + Carousel. 
+    Carousel.
+ """ + __metaclass__ = DataSourceBase + + def __init__(self, *args, **kwargs): + # save arguments, might need them later + self.args = args #: positional arguments + self.kwargs = kwargs #: keyword arguments + # make pycharm by defining inferred objects + meta = getattr(self, DataSourceBase._meta_attr) + parameters = getattr(self, DataSourceBase._param_attr) + # check if the data reader is a file reader + filename = None + if meta.data_reader.is_file_reader: + # get filename from args or kwargs + if args: + filename = args[0] + elif kwargs: + filename = kwargs.get('filename') + # raises KeyError: 'filename' if filename isn't given + # TODO: allow user to set explicit filename for cache + #: filename of file containing data + self.filename = filename + # private property + self._is_saved = True + # If filename ends with ".json", then either the original reader was + # a JSONReader or the data was cached. + # If data caching enabled and file doesn't end with ".json", cache it as + # JSON, append ".json" to the original filename and pass original data + # reader as extra argument. + if meta.data_cache_enabled and self._is_cached(): + # switch reader to JSONReader, with old reader as extra arg + data_reader_instance = JSONReader(parameters, meta) + else: + # create the data reader object specified using parameter map + data_reader_instance = meta.data_reader(parameters, meta) + #: data loaded from reader + self.data = data_reader_instance.load_data(*args, **kwargs) + # save JSON file if doesn't exist already. JSONReader checks utc mod + # time vs orig file, and deletes JSON file if orig file is newer. + if meta.data_cache_enabled and not self._is_cached(): + self.saveas_json(self.filename) # ".json" appended by saveas_json + # XXX: default values of uncertainty, isconstant and timeseries are + # empty dictionaries. + #: data uncertainty in percent + self.uncertainty = {} + #: variance + self.variance = {} + #: ``True`` if data is constant for all dynamic calculations + self.isconstant = {} + #: name of corresponding time series data, ``None`` if no time series + self.timeseries = {} + #: name of :class:`DataSource` + self.data_source = dict.fromkeys(self.data, self.__class__.__name__) + # TODO: need a consistent way to handle uncertainty, isconstant and time + # series + # XXX: Each superclass should do the following: + # * prepare the raw data from reader for the registry. Some examples of + # data preparation are combining numbers and units and uncertainties, + # data validation, combining years, months, days and hours into + # datetime objects and parsing data from strings. + # * handle uncertainty, isconstant, timeseries and any other meta data. + self._raw_data = copy(self.data) # shallow copy of data + self.__prepare_data__() # prepare data for registry + # calculate variances + for k0, d in self.uncertainty.iteritems(): + for k1, v01 in d.iteritems(): + self.variance[k0] = {k1: v01.to('fraction').m ** 2.0} + + def __prepare_data__(self): + """ + Prepare raw data from reader for the registry. Some examples of data + preparation are combining numbers and units and uncertainties, data + validation, combining years, months, days and hours into datetime + objects and parsing data from strings. + + Each data superclass should implement this method. If there is no data + preparation then use ``pass``. + """ + raise NotImplementedError('Data preparation not implemented. 
' + + 'Use ``pass`` if not required.') + + def _is_cached(self, ext='.json'): + """ + Determine if ``filename`` is cached using extension ``ex`` a string. + + :param ext: extension used to cache ``filename``, default is '.json' + :type ext: str + :return: True if ``filename`` is cached using extensions ``ex`` + :rtype: bool + """ + # extension must start with a dot + if not ext.startswith('.'): + # prepend extension with a dot + ext = '.%s' % ext + # cache file is filename with extension + cache_file = '%s%s' % (self.filename, ext) + # if filename already ends with extension or there's a file with the + # extension, then assume the data is cached + return self.filename.endswith(ext) or os.path.exists(cache_file) + + @property + def issaved(self): + return self._is_saved + + def saveas_json(self, save_name): + """ + Save :attr:`data`, :attr:`param_file`, original :attr:`data_reader` + and UTC modification time as keys in JSON file. If data is edited then + it should be saved using this method. Non-JSON data files are also + saved using this method. + + :param save_name: Name to save JSON file as, ".json" is appended. + :type save_name: str + """ + # make pycharm by defining inferred objects + meta = getattr(self, DataSourceBase._meta_attr) + param_file = getattr(self, DataSourceBase._param_file) + # JSONEncoder removes units and converts arrays to lists + # save last time file was modified + utc_mod_time = list(time.gmtime(os.path.getmtime(save_name))) + json_data = {'data': self.data, 'utc_mod_time': utc_mod_time, + 'param_file': param_file, + 'data_reader': meta.data_reader.__name__, + 'data_source': self.__class__.__name__} + if not save_name.endswith('.json'): + save_name += '.json' + with open(save_name, 'w') as fp: + json.dump(json_data, fp, cls=CarouselJSONEncoder) + # TODO: test file save successful + # TODO: need to update model + self._is_saved = True + + def edit(self, edits, data_reg): + """ + Edit data in :class:`Data_Source`. Sets :attr:`issaved` to ``False``. + """ + data_reg.update(edits) + self._is_saved = False + + def __getitem__(self, item): + return self.data[item] + + def __repr__(self): + parameters = getattr(self, DataSourceBase._param_attr) + fmt = ('<%s(' % self.__class__.__name__) + fmt += ', '.join('%s=%r' % (k, v) for k, v in parameters.iteritems()) + fmt += ')>' + return fmt diff --git a/carousel/core/formulas.py b/carousel/core/formulas.py index d9b5f0a..2244c1b 100644 --- a/carousel/core/formulas.py +++ b/carousel/core/formulas.py @@ -43,7 +43,7 @@ def register(self, new_formulas, *args, **kwargs): :param new_formulas: new formulas to add to registry. """ - kwargs.update(zip(self.meta_names, args)) + kwargs.update(list(zip(self.meta_names, args))) # call super method, meta must be passed as kwargs! 
super(FormulaRegistry, self).register(new_formulas, **kwargs) @@ -149,7 +149,7 @@ def import_formulas(self): # iterate through formulas for f in formula_param: formulas[f] = getattr(mod, f) - elif isinstance(formula_param, basestring): + elif isinstance(formula_param, str): # only one formula # FYI: use basestring to test for str and unicode # SEE: http://docs.python.org/2/library/functions.html#basestring @@ -172,7 +172,7 @@ class NumericalExpressionImporter(FormulaImporter): def import_formulas(self): formulas = {} # an empty list of formulas formula_param = self.parameters # formulas key - for f, p in formula_param.iteritems(): + for f, p in formula_param.items(): formulas[f] = lambda *args: ne.evaluate( p['extras']['expression'], {k: a for k, a in zip(p['args'], args)}, {} @@ -201,7 +201,7 @@ def __new__(mcs, name, bases, attr): return super(FormulaBase, mcs).__new__(mcs, name, bases, attr) -class Formula(object): +class Formula(object, metaclass=FormulaBase): """ A class for formulas. @@ -216,7 +216,6 @@ class Formula(object): This is the required interface for all source files containing formulas used in Carousel. """ - __metaclass__ = FormulaBase def __init__(self): # check for path listed in param file @@ -252,7 +251,7 @@ def __init__(self): # if formulas is a list or if it can't be iterated as a dictionary # then log warning and return try: - formula_param_generator = formula_param.iteritems() + formula_param_generator = iter(formula_param.items()) except AttributeError as err: LOGGER.warning('Attribute Error: %s', err.message) return @@ -285,7 +284,7 @@ def __init__(self): # check if retval units is a string or None before adding # extra units for Jacobian and covariance ret_units = self.units[k][0] - if isinstance(ret_units, basestring) or ret_units is None: + if isinstance(ret_units, str) or ret_units is None: self.units[k][0] = [ret_units] try: self.units[k][0] += [None, None] diff --git a/carousel/core/formulas.py.bak b/carousel/core/formulas.py.bak new file mode 100644 index 0000000..d9b5f0a --- /dev/null +++ b/carousel/core/formulas.py.bak @@ -0,0 +1,300 @@ +# -*- coding: utf-8 -*- + +""" +This module provides the framework for formulas. All formulas should inherit +from the Formula class in this module. Formula sources must include a +formula importer, or can subclass one of the formula importers here. +""" + +from carousel.core import logging, CommonBase, Registry, UREG, Parameter +import imp +import importlib +import os +import sys +import numexpr as ne +import inspect +from uncertainty_wrapper import unc_wrapper_args + +LOGGER = logging.getLogger(__name__) + + +class FormulaParameter(Parameter): + """ + Field for data parameters. + """ + _attrs = ['islinear', 'args', 'units', 'isconstant'] + + +class FormulaRegistry(Registry): + """ + A registry for formulas. The meta names are ``islinear``, ``args``, + ``units`` and ``isconstant``. + """ + meta_names = ['islinear', 'args', 'units', 'isconstant'] + + def register(self, new_formulas, *args, **kwargs): + """ + Register formula and meta data. + + * ``islinear`` - ``True`` if formula is linear, ``False`` if non-linear. + * ``args`` - position of arguments + * ``units`` - units of returns and arguments as pair of tuples + * ``isconstant`` - constant arguments not included in covariance + + :param new_formulas: new formulas to add to registry. + """ + kwargs.update(zip(self.meta_names, args)) + # call super method, meta must be passed as kwargs! 
+ super(FormulaRegistry, self).register(new_formulas, **kwargs) + + +class FormulaImporter(object): + """ + A class that imports formulas. + + :param parameters: Parameters used to import formulas. + :type parameters: dict + :param meta: Options for formulas and formula inporters + :type meta: Meta + """ + def __init__(self, parameters, meta=None): + #: parameters to be read by reader + self.parameters = parameters + #: options for importer + self.meta = meta + + def import_formulas(self): + """ + This method must be implemented by each formula importer. + + :returns: formulas + :rtype: dict + :raises: :exc:`~exceptions.NotImplementedError` + """ + raise NotImplementedError(' '.join(['Function "import_formulas" is', + 'not implemented.'])) + + +class PyModuleImporter(FormulaImporter): + """ + Import formulas from a Python module. + """ + def import_formulas(self): + """ + Import formulas specified in :attr:`parameters`. + + :returns: formulas + :rtype: dict + """ + # TODO: unit tests! + # TODO: move this to somewhere else and call it "importy", maybe + # core.__init__.py since a lot of modules might use it. + module = self.meta.module # module read from parameters + package = getattr(self.meta, 'package', None) # package read from meta + name = package + module if package else module # concat pkg + name + path = getattr(self.meta, 'path', None) # path read from parameters + # import module using module and package + mod = None + # SEE ALSO: http://docs.python.org/2/library/imp.html#examples + try: + # fast path: see if module was already imported + mod = sys.modules[name] + except KeyError: + try: + # import module specified in parameters + mod = importlib.import_module(module, package) + except ImportError as err: + if not path: + msg = ('%s could not be imported either because it was not ' + 'on the PYTHONPATH or path was not given.') + LOGGER.exception(msg, name) + raise err + else: + # import module using path + # expand ~, environmental variables and make path absolute + if not os.path.isabs(path): + path = os.path.expanduser(os.path.expandvars(path)) + path = os.path.abspath(path) + # paths must be a list + paths = [path] + # imp does not find hierarchical module names, find and load + # packages recursively, then load module, see last paragraph + # https://docs.python.org/2/library/imp.html#imp.find_module + pname = '' # full dotted name of package to load + # traverse namespace + while name: + # if dot in name get first package + if '.' 
in name: + pkg, name = name.split('.', 1) + else: + pkg, name = name, None # pkg is the module + # Find package or module by name and path + fp, filename, desc = imp.find_module(pkg, paths) + # full dotted name of package to load + pname = pkg if not pname else '%s.%s' % (pname, pkg) + LOGGER.debug('package name: %s', pname) + # try to load the package or module + try: + mod = imp.load_module(pname, fp, filename, desc) + finally: + if fp: + fp.close() + # append package paths for imp.find_module + if name: + paths = mod.__path__ + formulas = {} # an empty list of formulas + formula_param = self.parameters # formulas key + # FYI: iterating over dictionary is equivalent to iterkeys() + if isinstance(formula_param, (list, tuple, dict)): + # iterate through formulas + for f in formula_param: + formulas[f] = getattr(mod, f) + elif isinstance(formula_param, basestring): + # only one formula + # FYI: use basestring to test for str and unicode + # SEE: http://docs.python.org/2/library/functions.html#basestring + formulas[formula_param] = getattr(mod, formula_param) + else: + # autodetect formulas assuming first letter is f + formulas = {f: getattr(mod, f) for f in dir(mod) if f[:2] == 'f_'} + if not len(formulas): + for f in dir(mod): + mod_attr = getattr(mod, f) + if inspect.isfunction(mod_attr): + formulas[f] = mod_attr + return formulas + + +class NumericalExpressionImporter(FormulaImporter): + """ + Import formulas from numerical expressions using Python Numexpr. + """ + def import_formulas(self): + formulas = {} # an empty list of formulas + formula_param = self.parameters # formulas key + for f, p in formula_param.iteritems(): + formulas[f] = lambda *args: ne.evaluate( + p['extras']['expression'], + {k: a for k, a in zip(p['args'], args)}, {} + ).reshape(1, -1) + LOGGER.debug('formulas %s = %r', f, formulas[f]) + return formulas + + +class FormulaBase(CommonBase): + """ + Metaclass for formulas. + """ + _path_attr = 'formulas_path' + _file_attr = 'formulas_file' + _param_cls = FormulaParameter + + def __new__(mcs, name, bases, attr): + # use only with Formula subclasses + if not CommonBase.get_parents(bases, FormulaBase): + return super(FormulaBase, mcs).__new__(mcs, name, bases, attr) + # set _meta combined from bases + attr = mcs.set_meta(bases, attr) + # set param file full path if formulas path and file specified or + # try to set parameters from class attributes except private/magic + attr = mcs.set_param_file_or_parameters(attr) + return super(FormulaBase, mcs).__new__(mcs, name, bases, attr) + + +class Formula(object): + """ + A class for formulas. + + Specify ``formula_importer`` which must subclass :class:`FormulaImporter` + to import formula source files as class. If no ``formula_importer`` is + specified, the default is + :class:`~carousel.core.formulas.PyModuleImporter`. + + Specify ``formula_path`` and ``formula_file`` that contains formulas in + string form or parameters used to import the formula source file. + + This is the required interface for all source files containing formulas + used in Carousel. 
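+
+    A hypothetical subclass using a parameter file (the names below are
+    illustrative only, not part of the original source) might be::
+
+        class UtilityFormulas(Formula):
+            formulas_file = 'utils.json'
+            formulas_path = 'examples/PVPower/formulas'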
+ """ + __metaclass__ = FormulaBase + + def __init__(self): + # check for path listed in param file + path = getattr(self._meta, 'path', None) + if path is None: + proxy_file = self.param_file if self.param_file else __file__ + # use the same path as the param file or this file if no param file + self._meta.path = os.path.dirname(proxy_file) + + # check for path listed in param file + formula_importer = getattr(self._meta, 'formula_importer', None) + if formula_importer is None: + #: formula importer class, default is ``PyModuleImporter`` + self._meta.formula_importer = PyModuleImporter + + meta = getattr(self, '_meta', None) # options for formulas + importer_instance = self._meta.formula_importer(self.parameters, meta) + #: formulas loaded by the importer using specified parameters + self.formulas = importer_instance.import_formulas() + #: linearity determined by each data source? + self.islinear = {} + #: positional arguments + self.args = {} + #: expected units of returns and arguments as pair of tuples + self.units = {} + #: constant arguments that are not included in covariance calculation + self.isconstant = {} + # sequence of formulas, don't propagate uncertainty or units + for f in self.formulas: + self.islinear[f] = True + self.args[f] = inspect.getargspec(self.formulas[f]).args + formula_param = self.parameters # formulas key + # if formulas is a list or if it can't be iterated as a dictionary + # then log warning and return + try: + formula_param_generator = formula_param.iteritems() + except AttributeError as err: + LOGGER.warning('Attribute Error: %s', err.message) + return + # formula dictionary + for k, v in formula_param_generator: + if not v: + # skip formula if attributes are null or empty + continue + # get islinear formula attribute + is_linear = v.get('islinear') + if is_linear is not None: + self.islinear[k] = is_linear + # get positional arguments + f_args = v.get('args') + if f_args is not None: + self.args[k] = f_args + # get constant arguments to exclude from covariance + self.isconstant[k] = v.get('isconstant') + if self.isconstant[k] is not None: + argn = [n for n, a in enumerate(self.args[k]) if a not in + self.isconstant[k]] + LOGGER.debug('%s arg nums: %r', k, argn) + self.formulas[k] = unc_wrapper_args(*argn)(self.formulas[k]) + # get units of returns and arguments + self.units[k] = v.get('units') + if self.units[k] is not None: + # append units for covariance and Jacobian if all args + # constant and more than one return output + if self.isconstant[k] is not None: + # check if retval units is a string or None before adding + # extra units for Jacobian and covariance + ret_units = self.units[k][0] + if isinstance(ret_units, basestring) or ret_units is None: + self.units[k][0] = [ret_units] + try: + self.units[k][0] += [None, None] + except TypeError: + self.units[k][0] += (None, None) + # wrap function with Pint's unit wrapper + self.formulas[k] = UREG.wraps(*self.units[k])( + self.formulas[k] + ) + + def __getitem__(self, item): + return self.formulas[item] diff --git a/carousel/core/layers.py b/carousel/core/layers.py index cd83432..01b76c7 100644 --- a/carousel/core/layers.py +++ b/carousel/core/layers.py @@ -181,7 +181,7 @@ def load(self, rel_path=None): """ Add data_sources to layer and open files with data for the data_source. 
""" - for k, v in self.layer.iteritems(): + for k, v in self.layer.items(): self.add(k, v['module'], v.get('package')) filename = v.get('filename') path = v.get('path') @@ -193,7 +193,7 @@ def load(self, rel_path=None): path = os.path.join(rel_path, path) # filename can be a list or a string, concatenate list with # os.pathsep and append the full path to strings. - if isinstance(filename, basestring): + if isinstance(filename, str): filename = os.path.join(path, filename) else: file_list = [os.path.join(path, f) for f in filename] @@ -211,7 +211,7 @@ def edit(self, data_src, value): """ # check if opening file if 'filename' in value: - items = [k for k, v in self.reg.data_source.iteritems() if + items = [k for k, v in self.reg.data_source.items() if v == data_src] self.reg.unregister(items) # remove items from Registry # open file and register new data @@ -222,7 +222,7 @@ def delete(self, data_src): """ Delete data sources. """ - items = self.objects[data_src].data.keys() # items to edit + items = list(self.objects[data_src].data.keys()) # items to edit self.reg.unregister(items) # remove items from Registry self.layer.pop(data_src) # remove data source from layer self.objects.pop(data_src) # remove data_source object @@ -266,7 +266,7 @@ def load(self, _=None): """ Add formulas to layer. """ - for k, v in self.layer.iteritems(): + for k, v in self.layer.items(): self.add(k, v['module'], v.get('package')) def edit(self, src_cls, value): @@ -306,7 +306,7 @@ def load(self, _=None): """ Add calcs to layer. """ - for k, v in self.layer.iteritems(): + for k, v in self.layer.items(): self.add(k, v['module'], v.get('package')) def edit(self, src_cls, value): @@ -346,7 +346,7 @@ def load(self, _=None): """ Add output_source to layer. """ - for k, v in self.layer.iteritems(): + for k, v in self.layer.items(): self.add(k, v['module'], v.get('package')) def edit(self, src_cls, value): @@ -387,7 +387,7 @@ def load(self, rel_path=None): """ Add sim_src to layer. """ - for k, v in self.layer.iteritems(): + for k, v in self.layer.items(): self.add(k, v['module'], v.get('package')) filename = v.get('filename') path = v.get('path') diff --git a/carousel/core/layers.py.bak b/carousel/core/layers.py.bak new file mode 100644 index 0000000..cd83432 --- /dev/null +++ b/carousel/core/layers.py.bak @@ -0,0 +1,408 @@ +# -*- coding: utf-8 -*- +""" +This is the Layers module. There are five layers in a Carousel model: + +* Data +* Formulas +* Calculations +* Outputs +* Simulations + +Layers are used to assemble the model. For example, the data layer assembles +all of the :ref:`data-sources`, calling the :ref:`data-readers` and putting all +of the data (and meta) into the +:class:`~carousel.core.data_sources.DataRegistry`. + +In general all model layers have add, open and +:meth:`~carousel.core.layers.Layer.load` methods. The add method adds +a particular format such as a +:class:`~carousel.core.data_sources.DataSource`. The open method gets +data from a file in the format that was added. The +:meth:`~carousel.core.layers.Layer.load` method loads the layer into +the model. The :meth:`~carousel.core.layers.Layer.load` method must +be implemented in each subclass of +:class:`~carousel.core.layers.Layer` or +:exc:`~exceptions.NotImplementedError` is raised. 
+""" + +import importlib +import os +from carousel.core import logging, warnings +from carousel.core.simulations import SimRegistry, Simulation +from carousel.core.data_sources import DataRegistry, DataSource +from carousel.core.formulas import FormulaRegistry, Formula +from carousel.core.calculations import CalcRegistry, Calc +from carousel.core.outputs import OutputRegistry, Output + +LOGGER = logging.getLogger(__name__) +SIMFILE_LOAD_WARNING = ' '.join([ + 'Use of "filename" or "path" in model for simulation is deprecated.', + 'This will raise an exception in the future.' +]) + + +class Layer(object): + """ + A layer in the model. + + :param sources: Dictionary of model parameters specific to this layer. + :type sources: dict + """ + reg_cls = NotImplemented #: registry class + src_cls = NotImplemented #: source class + + def __init__(self, sources=None): + #: dictionary of layer sources + self.layer = sources + #: dictionary of layer source classes added to the layer + self.sources = {} + #: dictionary of source class instances added to the layer + self.objects = {} + #: registry of items contained in this layer + self.reg = self.reg_cls() + + def add(self, src_cls, module, package=None): + """ + Add layer class to model. This method may be overloaded by layer. + + :param src_cls: layer class to add, should not start with underscores + :type src_cls: str + :param module: Python module that contains layer class + :type module: str + :param package: optional package containing module with layer class + :type package: str + :raises: :exc:`~exceptions.NotImplementedError` + """ + # import module containing the layer class + mod = importlib.import_module(module, package) + # get layer class definition from the module + self.sources[src_cls] = getattr(mod, src_cls) + + def load(self, relpath=None): + """ + Load the layer from the model data. This method must be implemented by + each layer. + + :param relpath: alternate path if specified path is missing or ``None`` + :raises: :exc:`~exceptions.NotImplementedError` + """ + raise NotImplementedError('load') + + def delete(self, src_cls): + """ + Delete layer source class from layer. + :param src_cls: layer source class to delete. + :raises: :exc:`~exceptions.NotImplementedError` + """ + raise NotImplementedError('delete') + + def edit(self, src_cls, value): + """ + Edit layer source class with value. + + :param src_cls: layer source class to edit + :type src_cls: str + :param value: new value of layer source class + :raises: :exc:`~exceptions.NotImplementedError` + """ + raise NotImplementedError('delete') + + +class Data(Layer): + """ + The Data layer of the model. + + The :attr:`~Layer.layer` attribute is a dictionary of data sources names + as keys of dictionaries for each data source with the module and optionally + the package containing the module, the filename, which can be ``None``, + containing specific data for the data source and an optional path to the + data file. If the path is ``None``, then the default path for data internal + to Carousel is used. External data files should specify the path. + """ + reg_cls = DataRegistry #: data layer registry + src_cls = DataSource #: data layer source + + def add(self, data_source, module, package=None): + """ + Add data_source to model. Tries to import module, then looks for data + source class definition. + + :param data_source: Name of data source to add. + :type data_source: str + :param module: Module in which data source resides. Can be absolute or + relative. 
See :func:`importlib.import_module` + :type module: str + :param package: Optional, but must be used if module is relative. + :type package: str + + .. seealso:: + :func:`importlib.import_module` + """ + super(Data, self).add(data_source, module, package) + # only update layer info if it is missing! + if data_source not in self.layer: + # copy data source parameters to :attr:`Layer.layer` + self.layer[data_source] = {'module': module, 'package': package} + # add a place holder for the data source object when it's constructed + self.objects[data_source] = None + + def open(self, data_source, *args, **kwargs): + """ + Open filename to get data for data_source. + + :param data_source: Data source for which the file contains data. + :type data_source: str + + Positional and keyword arguments can contain either the data to use for + the data source or the full path of the file which contains data for the + data source. + """ + if self.sources[data_source]._meta.data_reader.is_file_reader: + filename = kwargs.get('filename') + path = kwargs.get('path', '') + rel_path = kwargs.get('rel_path', '') + if len(args) > 0: + filename = args[0] + if len(args) > 1: + path = args[1] + if len(args) > 2: + rel_path = args[2] + args = () + kwargs = {'filename': os.path.join(rel_path, path, filename)} + LOGGER.debug('filename: %s', kwargs['filename']) + # call constructor of data source with filename argument + self.objects[data_source] = self.sources[data_source](*args, **kwargs) + # register data and uncertainty in registry + data_src_obj = self.objects[data_source] + meta = [getattr(data_src_obj, m) for m in self.reg.meta_names] + self.reg.register(data_src_obj.data, *meta) + + def load(self, rel_path=None): + """ + Add data_sources to layer and open files with data for the data_source. + """ + for k, v in self.layer.iteritems(): + self.add(k, v['module'], v.get('package')) + filename = v.get('filename') + path = v.get('path') + if filename: + # default path for data is in ../data + if not path: + path = rel_path + else: + path = os.path.join(rel_path, path) + # filename can be a list or a string, concatenate list with + # os.pathsep and append the full path to strings. + if isinstance(filename, basestring): + filename = os.path.join(path, filename) + else: + file_list = [os.path.join(path, f) for f in filename] + filename = os.path.pathsep.join(file_list) + self.open(k, filename) + + def edit(self, data_src, value): + """ + Edit data layer. + + :param data_src: Name of :class:`DataSource` to edit. + :type data_src: str + :param value: Values to edit. + :type value: dict + """ + # check if opening file + if 'filename' in value: + items = [k for k, v in self.reg.data_source.iteritems() if + v == data_src] + self.reg.unregister(items) # remove items from Registry + # open file and register new data + self.open(data_src, value['filename'], value.get('path')) + self.layer[data_src].update(value) # update layer with new items + + def delete(self, data_src): + """ + Delete data sources. + """ + items = self.objects[data_src].data.keys() # items to edit + self.reg.unregister(items) # remove items from Registry + self.layer.pop(data_src) # remove data source from layer + self.objects.pop(data_src) # remove data_source object + self.sources.pop(data_src) # remove data_source object + + +class Formulas(Layer): + """ + Layer containing formulas. 
+    """
+    reg_cls = FormulaRegistry  #: formula layer registry
+    src_cls = Formula  #: formula layer source
+
+    def add(self, formula, module, package=None):
+        """
+        Import module (from package) with formulas, import formulas and add
+        them to formula registry.
+
+        :param formula: Name of the formula source to add/open.
+        :param module: Module containing formula source.
+        :param package: [Optional] Package of formula source module.
+
+        .. seealso::
+            :func:`importlib.import_module`
+        """
+        super(Formulas, self).add(formula, module, package)
+        # only update layer info if it is missing!
+        if formula not in self.layer:
+            # copy formula source parameters to :attr:`Layer.layer`
+            self.layer[formula] = {'module': module, 'package': package}
+        self.objects[formula] = self.sources[formula]()
+        # register formula and linearity in registry
+        formula_src_obj = self.objects[formula]
+        meta = [getattr(formula_src_obj, m) for m in self.reg.meta_names]
+        self.reg.register(formula_src_obj.formulas, *meta)
+
+    def open(self, formula, module, package=None):
+        self.add(formula, module, package=package)
+
+    def load(self, _=None):
+        """
+        Add formulas to layer.
+        """
+        for k, v in self.layer.iteritems():
+            self.add(k, v['module'], v.get('package'))
+
+    def edit(self, src_cls, value):
+        pass
+
+    def delete(self, src_cls):
+        pass
+
+
+class Calculations(Layer):
+    """
+    Layer containing calculations.
+    """
+    reg_cls = CalcRegistry  #: calculations layer registry
+    src_cls = Calc  #: calculation layer source
+
+    def add(self, calc, module, package=None):
+        """
+        Add calc to layer.
+        """
+        super(Calculations, self).add(calc, module, package)
+        # only update layer info if it is missing!
+        if calc not in self.layer:
+            # copy calc source parameters to :attr:`Layer.layer`
+            self.layer[calc] = {'module': module, 'package': package}
+        # instantiate the calc object
+        self.objects[calc] = self.sources[calc]()
+        # register calc and dependencies in registry
+        calc_src_obj = self.objects[calc]
+        meta = [getattr(calc_src_obj, m) for m in self.reg.meta_names]
+        self.reg.register(calc_src_obj.calcs, *meta)
+
+    def open(self, calc, module, package=None):
+        self.add(calc, module, package=package)
+
+    def load(self, _=None):
+        """
+        Add calcs to layer.
+        """
+        for k, v in self.layer.iteritems():
+            self.add(k, v['module'], v.get('package'))
+
+    def edit(self, src_cls, value):
+        pass
+
+    def delete(self, src_cls):
+        pass
+
+
+class Outputs(Layer):
+    """
+    Layer containing output sources.
+    """
+    reg_cls = OutputRegistry  #: output layer registry
+    src_cls = Output  #: output layer source
+
+    def add(self, output, module, package=None):
+        """
+        Add output to layer.
+        """
+        super(Outputs, self).add(output, module, package)
+        # only update layer info if it is missing!
+        if output not in self.layer:
+            # copy output source parameters to :attr:`Layer.layer`
+            self.layer[output] = {'module': module, 'package': package}
+        # instantiate the output object
+        self.objects[output] = self.sources[output]()
+        # register outputs and meta-data in registry
+        out_src_obj = self.objects[output]
+        meta = [getattr(out_src_obj, m) for m in self.reg.meta_names]
+        self.reg.register(out_src_obj.outputs, *meta)
+
+    def open(self, output, module, package=None):
+        self.add(output, module, package=package)
+
+    def load(self, _=None):
+        """
+        Add output_source to layer.
+        """
+        for k, v in self.layer.iteritems():
+            self.add(k, v['module'], v.get('package'))
+
+    def edit(self, src_cls, value):
+        pass
+
+    def delete(self, src_cls):
+        pass
+
+
+class Simulations(Layer):
+    """
+    Layer containing simulation sources.
+    """
+    reg_cls = SimRegistry  #: simulation layer registry
+    src_cls = Simulation  #: simulation layer source
+
+    def add(self, sim, module, package=None):
+        """
+        Add simulation to layer.
+        """
+        super(Simulations, self).add(sim, module, package)
+        # only update layer info if it is missing!
+        if sim not in self.layer:
+            # copy simulation source parameters to :attr:`Layer.layer`
+            self.layer[sim] = {'module': module, 'package': package}
+
+    def open(self, sim, filename=None):
+        # call constructor of sim source with filename argument
+        self.objects[sim] = self.sources[sim](filename)
+        # register simulations in registry; the only reason to register an
+        # item is to make sure it doesn't overwrite other items
+        sim_src_obj = self.objects[sim]
+        meta = [{str(sim): getattr(sim_src_obj, m)} for m in
+                self.reg.meta_names]
+        self.reg.register({sim: sim_src_obj}, *meta)
+
+    def load(self, rel_path=None):
+        """
+        Add sim_src to layer.
+        """
+        for k, v in self.layer.iteritems():
+            self.add(k, v['module'], v.get('package'))
+            filename = v.get('filename')
+            path = v.get('path')
+            if filename:
+                warnings.warn(DeprecationWarning(SIMFILE_LOAD_WARNING))
+                # default path for data is in ../simulations
+                if not path:
+                    path = rel_path
+                else:
+                    path = os.path.join(rel_path, path)
+                filename = os.path.join(path, filename)
+                self.open(k, filename)
+
+    def edit(self, src_cls, value):
+        pass
+
+    def delete(self, src_cls):
+        pass
diff --git a/carousel/core/models.py b/carousel/core/models.py
index 1d1008f..362cfd3 100644
--- a/carousel/core/models.py
+++ b/carousel/core/models.py
@@ -67,21 +67,20 @@ def __new__(mcs, name, bases, attr):
         attr = mcs.set_param_file_or_parameters(attr)
         # set default meta attributes
         meta = attr[mcs._meta_attr]
-        for ma, dflt in mcs._attr_default.iteritems():
+        for ma, dflt in mcs._attr_default.items():
             a = getattr(meta, ma, None)
             if a is None:
                 setattr(meta, ma, dflt)
         return super(ModelBase, mcs).__new__(mcs, name, bases, attr)
 
 
-class Model(object):
+class Model(object, metaclass=ModelBase):
     """
     A class for models. Carousel is a subclass of the :class:`Model` class.
 
     :param modelfile: The name of the JSON file with model data.
    :type modelfile: str
     """
-    __metaclass__ = ModelBase
 
     def __init__(self, modelfile=None):
         meta = getattr(self, ModelBase._meta_attr)
@@ -133,7 +132,7 @@ def _load(self, layer=None):
         # read and load JSON parameter map file as "parameters"
         with open(self.param_file, 'r') as param_file:
             file_params = json.load(param_file)
-        for layer, params in file_params.iteritems():
+        for layer, params in file_params.items():
             # update parameters from file
             self.parameters[layer] = ModelParameter(**params)
         # if layer argument spec'd then only update/load spec'd layer
@@ -176,7 +175,7 @@ def _initialize(self):
         # FIXME: move import inside loop for custom layers in different modules
         mod = importlib.import_module(meta.layers_mod, meta.layers_pkg)
         src_model = {}
-        for layer, value in self.model.iteritems():
+        for layer, value in self.model.items():
             # from layers module get the layer's class definition
             layer_cls = getattr(mod, meta.layer_cls_names[layer])  # class def
             self.layers[layer] = layer_cls  # add layer class def to model
@@ -189,7 +188,7 @@ def _initialize(self):
             except (TypeError, ValueError):
                 kwargs = {}  # no keyword arguments
             # skip if not a source class
-            if isinstance(src, basestring):
+            if isinstance(src, str):
                 continue
             # generate layer value from source class
             src_value[src.__name__] = {'module': src.__module__,
@@ -205,7 +204,7 @@ def _initialize(self):
                 value = dict(value['sources'])
             except ValueError:
                 value = dict.fromkeys(value['sources'], {})
-            for src in value.viewkeys():
+            for src in value.keys():
                 if srcmod is not None:
                     value[src]['module'] = srcmod
                 if srcpkg is not None:
@@ -252,7 +251,7 @@ def edit(self, layer, item, delete=False):
         if delete:
             return layer_obj
         # iterate over items and edit layer
-        for k, v in item.iteritems():
+        for k, v in item.items():
             if k in layer_obj.layer:
                 layer_obj.edit(k, v)  # edit layer
             else:
@@ -267,14 +266,14 @@ def add(self, layer, items):
         """
         Add items in model.
         """
-        for k in items.iterkeys():
+        for k in items.keys():
             if k in self.model[layer]:
                 raise Exception('item %s is already in layer %s' % (k, layer))
         self.model[layer].update(items)
         # this should also update Layer.layer, the layer data
         # same as calling layer constructor
         # so now just need to add items to the layer
-        for k, v in items.iteritems():
+        for k, v in items.items():
             getattr(self, layer).add(k, v['module'], v.get('package'))
 
     def delete(self, layer, items):
@@ -338,7 +337,7 @@ def command(self, cmd, progress_hook=None, *args, **kwargs):
         cmds = cmd.split(None, 1)  # split commands and simulations
         sim_names = cmds[1:]  # simulations
         if not sim_names:
-            sim_names = self.cmd_layer.reg.iterkeys()
+            sim_names = iter(self.cmd_layer.reg.keys())
         for sim_name in sim_names:
             sim_cmd = getattr(self.cmd_layer.reg[sim_name], cmd)
             sim_cmd(self, progress_hook=progress_hook, *args, **kwargs)
diff --git a/carousel/core/models.py.bak b/carousel/core/models.py.bak
new file mode 100644
index 0000000..1d1008f
--- /dev/null
+++ b/carousel/core/models.py.bak
@@ -0,0 +1,344 @@
+# -*- coding: utf-8 -*-
+"""
+This is the Carousel :mod:`~carousel.core.models` module that contains
+definitions for the :class:`~carousel.core.models.Model` class.
+
+The Carousel model contains five layers:
+:class:`~carousel.core.layers.Data`,
+:class:`~carousel.core.layers.Formulas`,
+:class:`~carousel.core.layers.Calculations`,
+:class:`~carousel.core.layers.Outputs` and
+:class:`~carousel.core.layers.Simulations`. The
+:class:`~carousel.core.layers.Data` layer organizes
+:ref:`data-sources` by providing methods to add and load data for Carousel.
+The :class:`~carousel.core.layers.Formulas` layer loads +:ref:`formulas` used by :class:`~carousel.core.layers.Calculations` +calculations. The :class:`~carousel.core.layers.Outputs` layer +organizes the calculated outputs for use in other calculations. Finally the +:class:`~carousel.core.layers.Simulations` layer organizes +options such as how long the simulation should run and takes care of actually +running the simulation. +""" + +import importlib +import json +import os +import copy +from carousel.core import logging, _listify, CommonBase, Parameter + +LOGGER = logging.getLogger(__name__) +LAYERS_MOD = '.layers' +LAYERS_PKG = 'carousel.core' +LAYER_CLS_NAMES = {'data': 'Data', 'calculations': 'Calculations', + 'formulas': 'Formulas', 'outputs': 'Outputs', + 'simulations': 'Simulations'} + + +class ModelParameter(Parameter): + _attrs = ['layer', 'module', 'package', 'path', 'sources'] + + +class ModelBase(CommonBase): + """ + Base model meta class. If model has class attributes "modelpath" and + "modelfile" then layer class names and model configuration will be read from + the file on that path. Otherwise layer class names will be read from the + class attributes. + """ + _path_attr = 'modelpath' + _file_attr = 'modelfile' + _param_cls = ModelParameter + _layers_cls_attr = 'layer_cls_names' + _layers_mod_attr = 'layers_mod' + _layers_pkg_attr = 'layers_pkg' + _cmd_layer_attr = 'cmd_layer_name' + _attr_default = { + _layers_cls_attr: LAYER_CLS_NAMES, _layers_mod_attr: LAYERS_MOD, + _layers_pkg_attr: LAYERS_PKG, _cmd_layer_attr: 'simulations' + } + + def __new__(mcs, name, bases, attr): + # use only with Model subclasses + if not CommonBase.get_parents(bases, ModelBase): + return super(ModelBase, mcs).__new__(mcs, name, bases, attr) + attr = mcs.set_meta(bases, attr) + # set param file full path if data source path and file specified or + # try to set parameters from class attributes except private/magic + attr = mcs.set_param_file_or_parameters(attr) + # set default meta attributes + meta = attr[mcs._meta_attr] + for ma, dflt in mcs._attr_default.iteritems(): + a = getattr(meta, ma, None) + if a is None: + setattr(meta, ma, dflt) + return super(ModelBase, mcs).__new__(mcs, name, bases, attr) + + +class Model(object): + """ + A class for models. Carousel is a subclass of the :class:`Model` class. + + :param modelfile: The name of the JSON file with model data. 
+ :type modelfile: str + """ + __metaclass__ = ModelBase + + def __init__(self, modelfile=None): + meta = getattr(self, ModelBase._meta_attr) + parameters = getattr(self, ModelBase._param_attr) + # load modelfile if it's an argument + if modelfile is not None: + #: model file + self.param_file = os.path.abspath(modelfile) + LOGGER.debug('modelfile: %s', modelfile) + else: + modelfile = self.param_file + # check meta class for model if declared inline + if parameters: + # TODO: separate model and parameters according to comments in #78 + #: dictionary of the model + self.model = model = copy.deepcopy(parameters) + else: + #: dictionary of the model + self.model = model = None + # layer attributes initialized in meta class or _initialize() + # for k, v in layer_cls_names.iteritems(): + # setattr(self, k, v) + # XXX: this seems bad to initialize attributes outside of constructor + #: dictionary of model layer classes + self.layers = {} + #: state of model, initialized or uninitialized + self._state = 'uninitialized' + # need either model file or model and layer class names to initialize + ready_to_initialize = ((modelfile is not None or model is not None) and + meta.layer_cls_names is not None) + if ready_to_initialize: + self._initialize() # initialize using modelfile or model + + @property + def state(self): + """ + current state of the model + """ + return self._state + + def _load(self, layer=None): + """ + Load or update all or part of :attr:`model`. + + :param layer: Optionally load only specified layer. + :type layer: str + """ + # open model file for reading and convert JSON object to dictionary + # read and load JSON parameter map file as "parameters" + with open(self.param_file, 'r') as param_file: + file_params = json.load(param_file) + for layer, params in file_params.iteritems(): + # update parameters from file + self.parameters[layer] = ModelParameter(**params) + # if layer argument spec'd then only update/load spec'd layer + if not layer or not self.model: + # update/load model if layer not spec'd or if no model exists yet + # TODO: separate model and parameters according to comments in #78 + self.model = copy.deepcopy(self.parameters) + else: + # convert non-sequence to tuple + layers = _listify(layer) + # update/load layers + for layer in layers: + self.model[layer] = copy.deepcopy(self.parameters[layer]) + + def _update(self, layer=None): + """ + Update layers in model. + """ + meta = getattr(self, ModelBase._meta_attr) + if not layer: + layers = self.layers + else: + # convert non-sequence to tuple + layers = _listify(layer) + for layer in layers: + # relative path to layer files from model file + path = os.path.abspath(os.path.join(meta.modelpath, layer)) + getattr(self, layer).load(path) + + def _initialize(self): + """ + Initialize model and layers. 
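+
+        A rough sketch of the steps (simplified, not the exact code below)::
+
+            self._load()  # read modelfile JSON into self.model
+            mod = importlib.import_module(meta.layers_mod, meta.layers_pkg)
+            for layer, value in self.model.iteritems():
+                layer_cls = getattr(mod, meta.layer_cls_names[layer])
+                setattr(self, layer, layer_cls(value))  # instantiate layer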
+        """
+        meta = getattr(self, ModelBase._meta_attr)
+        # read modelfile, convert JSON and load/update model
+        if self.param_file is not None:
+            self._load()
+        LOGGER.debug('model:\n%r', self.model)
+        # initialize layers
+        # FIXME: move import inside loop for custom layers in different modules
+        mod = importlib.import_module(meta.layers_mod, meta.layers_pkg)
+        src_model = {}
+        for layer, value in self.model.iteritems():
+            # from layers module get the layer's class definition
+            layer_cls = getattr(mod, meta.layer_cls_names[layer])  # class def
+            self.layers[layer] = layer_cls  # add layer class def to model
+            # check if model layers are classes
+            src_value = {}  # layer value generated from source classes
+            for src in value['sources']:
+                # check if source has keyword arguments
+                try:
+                    src, kwargs = src
+                except (TypeError, ValueError):
+                    kwargs = {}  # no keyword arguments
+                # skip if not a source class
+                if isinstance(src, basestring):
+                    continue
+                # generate layer value from source class
+                src_value[src.__name__] = {'module': src.__module__,
+                                           'package': None}
+                # update layer keyword arguments
+                src_value[src.__name__].update(kwargs)
+            # use layer values generated from source class
+            if src_value:
+                value = src_model[layer] = src_value
+            else:
+                srcmod, srcpkg = value.get('module'), value.get('package')
+                try:
+                    value = dict(value['sources'])
+                except ValueError:
+                    value = dict.fromkeys(value['sources'], {})
+                for src in value.viewkeys():
+                    if srcmod is not None:
+                        value[src]['module'] = srcmod
+                    if srcpkg is not None:
+                        value[src]['package'] = srcpkg
+            # set layer attribute with model data
+            setattr(self, layer, layer_cls(value))
+        # update model with layer values generated from source classes
+        if src_model:
+            self.model.update(src_model)
+        self._update()
+        self._state = 'initialized'
+
+    def load(self, modelfile, layer=None):
+        """
+        Load or update a model or layers in a model.
+
+        :param modelfile: The name of the json file to load.
+        :type modelfile: str
+        :param layer: Optionally load only specified layer.
+        :type layer: str
+        """
+        # read modelfile, convert JSON and load/update model
+        self.param_file = modelfile
+        self._load(layer)
+        self._update(layer)
+
+    def edit(self, layer, item, delete=False):
+        """
+        Edit model.
+
+        :param layer: Layer of model to edit
+        :type layer: str
+        :param item: Items to edit.
+        :type item: dict
+        :param delete: Flag to return
+            :class:`~carousel.core.layers.Layer` to delete item.
+        :type delete: bool
+        """
+        # get layer attribute with model data
+        if hasattr(self, layer):
+            layer_obj = getattr(self, layer)
+        else:
+            raise AttributeError('missing layer: %s', layer)
+        if delete:
+            return layer_obj
+        # iterate over items and edit layer
+        for k, v in item.iteritems():
+            if k in layer_obj.layer:
+                layer_obj.edit(k, v)  # edit layer
+            else:
+                raise AttributeError('missing layer item: %s', k)
+            # update model data
+            if k in self.model[layer]:
+                self.model[layer][k].update(v)
+            else:
+                raise AttributeError('missing model layer item: %s', k)
+
+    def add(self, layer, items):
+        """
+        Add items in model.
+        """
+        for k in items.iterkeys():
+            if k in self.model[layer]:
+                raise Exception('item %s is already in layer %s' % (k, layer))
+        self.model[layer].update(items)
+        # this should also update Layer.layer, the layer data
+        # same as calling layer constructor
+        # so now just need to add items to the layer
+        for k, v in items.iteritems():
+            getattr(self, layer).add(k, v['module'], v.get('package'))
+
+    def delete(self, layer, items):
+        """
+        Delete items in model.
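+
+        :param layer: Name of the layer containing the items to delete.
+        :type layer: str
+        :param items: Name or list of names of items to delete.
+
+        For example, with illustrative names::
+
+            model.delete('data', 'PVPowerData')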
+ """ + # Use edit to get the layer obj containing item + items = _listify(items) # make items a list if it's not + layer_obj = self.edit(layer, dict.fromkeys(items), delete=True) + for k in items: + if k in layer_obj.layer: + layer_obj.delete(k) + else: + raise AttributeError('item %s missing from layer %s' % + (k, layer)) + # don't need to pop items from self.model, because, self.layer + # points to the same object as the item in model! + # for example: + # (Pdb) id(self.model['data']) # same ID as layer in data + # 125639560L + # (Pdb) id(self.data.layer) # same ID as data in model + # 125639560L + + def save(self, modelfile, layer=None): + """ + Save a model file. + + :param modelfile: The name of the json file to save. + :type modelfile: str + :param layer: Optionally save only specified layer. + :type layer: str + """ + if layer: + obj = {layer: self.model[layer]} + else: + obj = self.model + with open(modelfile, 'w') as fp: + json.dump(obj, fp, indent=2, sort_keys=True) + + @property + def registries(self): + return {layer: getattr(self, layer).reg + for layer in self.layers} + + @property + def cmd_layer(self): + meta = getattr(self, ModelBase._meta_attr) + return getattr(self, meta.cmd_layer_name, NotImplemented) + + @property + def commands(self): + return self.cmd_layer.reg.commands + + def command(self, cmd, progress_hook=None, *args, **kwargs): + """ + Execute a model command. + + :param cmd: Name of the command. + :param progress_hook: A function to which progress updates are passed. + """ + cmds = cmd.split(None, 1) # split commands and simulations + sim_names = cmds[1:] # simulations + if not sim_names: + sim_names = self.cmd_layer.reg.iterkeys() + for sim_name in sim_names: + sim_cmd = getattr(self.cmd_layer.reg[sim_name], cmd) + sim_cmd(self, progress_hook=progress_hook, *args, **kwargs) diff --git a/carousel/core/outputs.py b/carousel/core/outputs.py index 1288aa1..75cea54 100644 --- a/carousel/core/outputs.py +++ b/carousel/core/outputs.py @@ -47,7 +47,7 @@ def register(self, new_outputs, *args, **kwargs): :param new_outputs: new outputs to register. """ - kwargs.update(zip(self.meta_names, args)) + kwargs.update(list(zip(self.meta_names, args))) # call super method super(OutputRegistry, self).register(new_outputs, **kwargs) @@ -78,7 +78,7 @@ def __new__(mcs, name, bases, attr): return super(OutputBase, mcs).__new__(mcs, name, bases, attr) -class Output(object): +class Output(object, metaclass=OutputBase): """ A class for formatting outputs. @@ -103,7 +103,6 @@ class PVPowerOutputs(Output): hourly_energy = {'init': 0, 'units': 'Wh', 'size': 8760} yearly_energy = {'init': 0, 'units': 'kWh'} """ - __metaclass__ = OutputBase def __init__(self): #: outputs initial value @@ -126,7 +125,7 @@ def __init__(self): self.output_source = {} #: calculation outputs self.outputs = {} - for k, v in self.parameters.iteritems(): + for k, v in self.parameters.items(): self.initial_value[k] = v.get('init') # returns None if missing self.size[k] = v.get('size') or 1 # minimum size is 1 self.uncertainty[k] = None # uncertainty for outputs is calculated diff --git a/carousel/core/outputs.py.bak b/carousel/core/outputs.py.bak new file mode 100644 index 0000000..1288aa1 --- /dev/null +++ b/carousel/core/outputs.py.bak @@ -0,0 +1,141 @@ +# -*- coding: utf-8 -*- + +""" +This module provides the framework for output from Carousel. It is similar +to the data layer except output sources are always calculations. 
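+
+Output sources subclass :class:`Output` and their variables are collected in
+an :class:`OutputRegistry` along with meta such as initial values, sizes and
+uncertainties.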
+"""
+
+from carousel.core import logging, CommonBase, UREG, Q_, Registry, Parameter
+import json
+import numpy as np
+
+LOGGER = logging.getLogger(__name__)
+
+
+class OutputParameter(Parameter):
+    """
+    Fields for outputs.
+    """
+    _attrs = ['units', 'init', 'size', 'isconstant', 'isproperty', 'timeseries']
+
+
+class OutputRegistry(Registry):
+    """
+    A registry for output from calculations.
+    """
+    meta_names = [
+        'initial_value', 'size', 'uncertainty', 'variance', 'jacobian',
+        'isconstant', 'isproperty', 'timeseries', 'output_source'
+    ]
+
+    def register(self, new_outputs, *args, **kwargs):
+        """
+        Register outputs and metadata.
+
+        * ``initial_value`` - used in dynamic calculations
+        * ``size`` - number of elements per timestep
+        * ``uncertainty`` - in percent of nominal value
+        * ``variance`` - dictionary of covariances, diagonal is square of
+          uncertainties, no units
+        * ``jacobian`` - dictionary of sensitivities dxi/dfj
+        * ``isconstant`` - ``True`` if constant, ``False`` if periodic
+        * ``isproperty`` - ``True`` if output stays at last value during
+          thresholds, ``False`` if reverts to initial value
+        * ``timeseries`` - name of corresponding time series output, ``None`` if
+          no time series
+        * ``output_source`` - name of the :class:`Output` source
+
+        :param new_outputs: new outputs to register.
+        """
+        kwargs.update(zip(self.meta_names, args))
+        # call super method
+        super(OutputRegistry, self).register(new_outputs, **kwargs)
+
+
+class OutputBase(CommonBase):
+    """
+    Metaclass for outputs.
+
+    Setting the ``__metaclass__`` attribute to :class:`OutputBase` adds the
+    full path to the specified output parameter file as ``param_file`` or
+    adds ``parameters`` with outputs specified. Also checks that outputs is a
+    subclass of :class:`Output`. Sets `output_path` and `output_file` as the
+    class attributes that specify the parameter file full path.
+    """
+    _path_attr = 'outputs_path'
+    _file_attr = 'outputs_file'
+    _param_cls = OutputParameter
+
+    def __new__(mcs, name, bases, attr):
+        # use only with Output subclasses
+        if not CommonBase.get_parents(bases, OutputBase):
+            return super(OutputBase, mcs).__new__(mcs, name, bases, attr)
+        # set _meta combined from bases
+        attr = mcs.set_meta(bases, attr)
+        # set param file full path if outputs path and file specified or
+        # try to set parameters from class attributes except private/magic
+        attr = mcs.set_param_file_or_parameters(attr)
+        return super(OutputBase, mcs).__new__(mcs, name, bases, attr)
+
+
+class Output(object):
+    """
+    A class for formatting outputs.
+
+    Do not use this class directly. Instead subclass it in your output model and
+    list the path and file of the outputs parameters or provide the parameters
+    as class members.
+ + Example of specified output parameter file:: + + import os + + PROJ_PATH = os.path.join('project', 'path') # project path + + + class PVPowerOutputs(Output): + outputs_file = 'pvpower.json' + outputs_path = os.path.join(PROJ_PATH, 'outputs') + + Example of specified output parameters:: + + class PVPowerOutputs(Output): + hourly_energy = {'init': 0, 'units': 'Wh', 'size': 8760} + yearly_energy = {'init': 0, 'units': 'kWh'} + """ + __metaclass__ = OutputBase + + def __init__(self): + #: outputs initial value + self.initial_value = {} + #: size of outputs + self.size = {} + #: outputs uncertainty + self.uncertainty = {} + #: variance + self.variance = {} + #: jacobian + self.jacobian = {} + #: outputs isconstant flag + self.isconstant = {} + #: outputs isproperty flag + self.isproperty = {} + #: name of corresponding time series, ``None`` if no time series + self.timeseries = {} + #: name of :class:`Output` superclass + self.output_source = {} + #: calculation outputs + self.outputs = {} + for k, v in self.parameters.iteritems(): + self.initial_value[k] = v.get('init') # returns None if missing + self.size[k] = v.get('size') or 1 # minimum size is 1 + self.uncertainty[k] = None # uncertainty for outputs is calculated + self.isconstant[k] = v.get('isconstant', False) # True or False + self.isproperty[k] = v.get('isproperty', False) # True or False + units = str(v.get('units', '')) # default is non-dimensional + # NOTE: np.empty is faster than zeros! + self.outputs[k] = Q_(np.zeros((1, self.size[k])), UREG(units)) + # NOTE: Initial values are assigned and outputs resized when + # simulation "start" method is called from the model. + self.timeseries[k] = v.get('timeseries') # None if not time series + self.output_source[k] = self.__class__.__name__ # output source diff --git a/carousel/core/simulations.py b/carousel/core/simulations.py index 737e5d2..085c833 100644 --- a/carousel/core/simulations.py +++ b/carousel/core/simulations.py @@ -14,7 +14,7 @@ import os import sys import numpy as np -import Queue +import queue import functools from datetime import datetime @@ -47,11 +47,11 @@ def id_maker(obj): def sim_progress_hook(format_args, display_header=False): - if isinstance(format_args, basestring): + if isinstance(format_args, str): format_str = '---------- %s ----------\n' else: idx = format_args[0] - fields, values = zip(*format_args[1:]) + fields, values = list(zip(*format_args[1:])) format_str = '\r%5d' + ' %10.4g' * len(values) if display_header: units = (str(v.dimensionality) for v in values) @@ -79,19 +79,19 @@ def topological_sort(dag): `_ """ # find all edges of dag - topsort = [node for node, edge in dag.iteritems() if not edge] + topsort = [node for node, edge in dag.items() if not edge] # loop through nodes until topologically sorted while len(topsort) < len(dag): num_nodes = len(topsort) # number of nodes # unsorted nodes - for node in dag.viewkeys() - set(topsort): + for node in dag.keys() - set(topsort): # nodes with no incoming edges if set(dag[node]) <= set(topsort): topsort.append(node) break # circular dependencies if len(topsort) == num_nodes: - raise CircularDependencyError(dag.viewkeys() - set(topsort)) + raise CircularDependencyError(dag.keys() - set(topsort)) return topsort @@ -116,7 +116,7 @@ def register(self, sim, *args, **kwargs): :param sim: new simulation """ - kwargs.update(zip(self.meta_names, args)) + kwargs.update(list(zip(self.meta_names, args))) # call super method, now meta can be passed as args or kwargs. 
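+        # (in Python 3 ``zip`` returns an iterator, hence the ``list`` above)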
 super(SimRegistry, self).register(sim, **kwargs)
@@ -153,7 +153,7 @@ def __new__(mcs, name, bases, attr):
         return super(SimBase, mcs).__new__(mcs, name, bases, attr)
 
 
-class Simulation(object):
+class Simulation(object, metaclass=SimBase):
     """
     A class for simulations.
 
@@ -173,7 +173,6 @@ class attributes in a subclass or a combination of all 3 methods.
     Any additional settings provided as keyword arguments will override settings
     from file.
     """
-    __metaclass__ = SimBase
     attrs = {
         'ID': None,
         'path': os.path.join('~', 'Carousel', 'Simulations'),
@@ -201,7 +200,7 @@ def __init__(self, simfile=None, settings=None, **kwargs):
                 file_params = json.load(param_file)
             #: simulation parameters from file
             self.parameters = {settings: SimParameter(**params) for
-                               settings, params in file_params.iteritems()}
+                               settings, params in file_params.items()}
         # if not subclassed and metaclass skipped, then use kwargs
         if not hasattr(self, 'parameters'):
             #: parameter file
@@ -211,7 +210,7 @@ def __init__(self, simfile=None, settings=None, **kwargs):
         else:
             # use first settings
             if settings is None:
-                self.settings, self.parameters = self.parameters.items()[0]
+                self.settings, self.parameters = list(self.parameters.items())[0]
             else:
                 #: name of sim settings used for parameters
                 self.settings = settings
@@ -225,13 +224,13 @@
         self.write_frequency = 0
         self.write_fields = {}
         # pop deprecated attribute names
-        for k, v in self.deprecated.iteritems():
+        for k, v in self.deprecated.items():
            val = self.parameters['extras'].pop(v, None)
            # update parameters if deprecated attr used and no new attr
            if val and k not in self.parameters:
                self.parameters[k] = val
        # Attributes
-        for k, v in self.attrs.iteritems():
+        for k, v in self.attrs.items():
            setattr(self, k, self.parameters.get(k, v))
        # member docstrings are in documentation since attrs are generated
        if self.ID is None:
@@ -242,12 +241,12 @@
         self.path = os.path.expandvars(os.path.expanduser(self.path))
         self.path = os.path.abspath(self.path)
         # convert simulation interval to Pint Quantity
-        if isinstance(self.interval, basestring):
+        if isinstance(self.interval, str):
            self.interval = UREG(self.interval)
        elif not isinstance(self.interval, Q_):
            self.interval = self.interval[0] * UREG(str(self.interval[1]))
        # convert simulation length to Pint Quantity
-        if isinstance(self.sim_length, basestring):
+        if isinstance(self.sim_length, str):
            self.sim_length = UREG(self.sim_length)
        elif not isinstance(self.sim_length, Q_):
            self.sim_length = self.sim_length[0] * UREG(str(self.sim_length[1]))
@@ -266,7 +265,7 @@
         #: order of calculations
         self.calc_order = []
         #: command queue
-        self.cmd_queue = Queue.Queue()
+        self.cmd_queue = queue.Queue()
         #: index iterator
         self.idx_iter = self.index_iterator()
         #: data loaded status
@@ -401,7 +400,7 @@ def start(self, model, progress_hook=None):
                 _initial_value = out_reg.initial_value[k]
                 if not _initial_value:
                     continue
-                if isinstance(_initial_value, basestring):
+                if isinstance(_initial_value, str):
                     # initial value is from data registry
                     # assigning a scalar to a vector fills in the vector, yes!
                    out_reg[k][-1] = data_reg[_initial_value]
@@ -443,14 +442,14 @@ def start(self, model, progress_hook=None):
             self.interval_idx = idx_tot  # update simulation interval counter
             idx = idx_tot % self.write_frequency
             # update properties
-            for k, v in out_reg.isproperty.iteritems():
+            for k, v in out_reg.isproperty.items():
                 # set properties from previous interval at night
                 if v:
                     out_reg[k][idx] = out_reg[k][idx - 1]
             # night if any threshold exceeded
             if self.thresholds:
                 night = not all(limits[0] < data_reg[data][idx] < limits[1] for
-                                data, limits in self.thresholds.iteritems())
+                                data, limits in self.thresholds.items())
             else:
                 night = None
             # daytime or always calculated outputs
@@ -495,7 +494,7 @@ def start(self, model, progress_hook=None):
                               header=save_header, comments='')
             try:
                 cmd = self.cmd_queue.get_nowait()
-            except Queue.Empty:
+            except queue.Empty:
                 continue
             if cmd == 'pause':
                 self._ispaused = True
@@ -541,7 +540,7 @@ def load(self, model, progress_hook=None, *args, **kwargs):
         data = kwargs.get('data', {})
         if not data and args:
             data = args[0]
-        for k, v in data.iteritems():
+        for k, v in data.items():
             progress_hook('loading simulation for %s' % k)
             model.data.open(k, **v)
         self.check_data(model.data)
diff --git a/carousel/core/simulations.py.bak b/carousel/core/simulations.py.bak
new file mode 100644
index 0000000..737e5d2
--- /dev/null
+++ b/carousel/core/simulations.py.bak
@@ -0,0 +1,555 @@
+# -*- coding: utf-8 -*-
+"""
+This is the Simulation module. The Simulation layer takes care of creating
+output variables, writing data to disk, iterating over data and calculations
+at each interval in the simulation and setting any parameters required to
+perform the simulation. It gets all its info from the model, which in turn
+gets it from each layer which gets info from the layers' sources.
+"""
+
+from carousel.core import logging, CommonBase, Registry, UREG, Q_, Parameter
+from carousel.core.exceptions import CircularDependencyError, MissingDataError
+import json
+import errno
+import os
+import sys
+import numpy as np
+import Queue
+import functools
+from datetime import datetime
+
+LOGGER = logging.getLogger(__name__)
+
+
+def mkdir_p(path):
+    """
+    http://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python
+    :param path: path to make recursively
+    """
+    try:
+        os.makedirs(path)
+    except OSError as exc:
+        if exc.errno == errno.EEXIST and os.path.isdir(path):
+            pass
+        else:
+            raise exc
+
+
+def id_maker(obj):
+    """
+    Makes an ID from the object's class name and the datetime now in ISO format.
+
+    :param obj: the class from which to make the ID
+    :return: ID
+    """
+    dtfmt = '%Y%m%d-%H%M%S'
+    return '%s-%s' % (obj.__class__.__name__, datetime.now().strftime(dtfmt))
+
+
+def sim_progress_hook(format_args, display_header=False):
+    if isinstance(format_args, basestring):
+        format_str = '---------- %s ----------\n'
+    else:
+        idx = format_args[0]
+        fields, values = zip(*format_args[1:])
+        format_str = '\r%5d' + ' %10.4g' * len(values)
+        if display_header:
+            units = (str(v.dimensionality) for v in values)
+            units = tuple(['n/d' if u == 'dimensionless' else u
+                           for u in units])
+            format_args = fields + units + (idx,) + values
+            format_units = ('units' + ' %10s' * len(units)) + '\n'
+            fmt_header = ('index' + ' %10s' * len(fields)) + '\n'
+            format_str = fmt_header + format_units + format_str
+        else:
+            format_args = (idx,) + values
+    sys.stdout.write(format_str % format_args)
+
+
+def topological_sort(dag):
+    """
+    topological sort
+
+    :param dag: directed acyclic graph
+    :type dag: dict
+
+    .. seealso:: `Topological Sorting
+        <https://en.wikipedia.org/wiki/Topological_sorting>`_,
+        `Directed Acyclic Graph (DAG)
+        <https://en.wikipedia.org/wiki/Directed_acyclic_graph>`_
+    """
+    # find all edges of dag
+    topsort = [node for node, edge in dag.iteritems() if not edge]
+    # loop through nodes until topologically sorted
+    while len(topsort) < len(dag):
+        num_nodes = len(topsort)  # number of nodes
+        # unsorted nodes
+        for node in dag.viewkeys() - set(topsort):
+            # nodes with no incoming edges
+            if set(dag[node]) <= set(topsort):
+                topsort.append(node)
+                break
+        # circular dependencies
+        if len(topsort) == num_nodes:
+            raise CircularDependencyError(dag.viewkeys() - set(topsort))
+    return topsort
+
+
+class SimParameter(Parameter):
+    _attrs = ['ID', 'path', 'commands', 'data', 'thresholds', 'interval',
+              'sim_length', 'display_frequency', 'display_fields',
+              'write_frequency', 'write_fields']
+
+
+class SimRegistry(Registry):
+    """
+    Registry for simulations.
+    """
+    #: meta names
+    meta_names = ['commands']
+
+    def register(self, sim, *args, **kwargs):
+        """
+        Register simulation and metadata.
+
+        * ``commands`` - list of methods callable from model
+
+        :param sim: new simulation
+        """
+        kwargs.update(zip(self.meta_names, args))
+        # call super method, now meta can be passed as args or kwargs.
+        super(SimRegistry, self).register(sim, **kwargs)
+
+
+class SimBase(CommonBase):
+    """
+    Meta class for simulations.
+    """
+    _path_attr = 'sim_path'
+    _file_attr = 'sim_file'
+    _attributes = 'attrs'
+    _deprecated = 'deprecated'
+    _param_cls = SimParameter
+
+    def __new__(mcs, name, bases, attr):
+        # use only with Simulation subclasses
+        if not CommonBase.get_parents(bases, SimBase):
+            LOGGER.debug('bases:\n%r', bases)
+            return super(SimBase, mcs).__new__(mcs, name, bases, attr)
+        # set _meta combined from bases
+        attr = mcs.set_meta(bases, attr)
+        # let some subclass attributes override the superclass
+        attributes = attr.pop(mcs._attributes, None)
+        deprecated = attr.pop(mcs._deprecated, None)
+        # set param file full path if simulations path and file specified or
+        # try to set parameters from class attributes except private/magic
+        attr = mcs.set_param_file_or_parameters(attr)
+        # reset subclass attributes
+        if attributes is not None:
+            attr[mcs._attributes] = attributes
+        if deprecated is not None:
+            attr[mcs._deprecated] = deprecated
+        LOGGER.debug('attributes:\n%r', attr)
+        return super(SimBase, mcs).__new__(mcs, name, bases, attr)
+
+
+class Simulation(object):
+    """
+    A class for simulations.
+
+    :param simfile: Filename of simulation configuration file.
+    :type simfile: str
+    :param settings: keyword name of simulation parameter to use for settings
+    :type settings: str
+
+    Simulation attributes can be passed as keyword arguments directly
+    to :class:`~carousel.core.simulations.Simulation`, in a JSON file, as
+    class attributes in a subclass, or a combination of all 3 methods.
+
+    To get a list of :class:`~carousel.core.simulations.Simulation` attributes
+    and their defaults, get the
+    :attr:`~carousel.core.simulations.Simulation.attrs` attribute.
+
+    Any additional settings provided as keyword arguments will override settings
+    from file.
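+
+    For example, settings can be given directly as keyword arguments; the
+    values shown here are illustrative only::
+
+        sim = Simulation(sim_length=(8760, 'hours'), display_frequency=24)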
+ """ + __metaclass__ = SimBase + attrs = { + 'ID': None, + 'path': os.path.join('~', 'Carousel', 'Simulations'), + 'commands': ['start', 'pause'], + 'data': None, + 'thresholds': None, + 'interval': 1 * UREG.hour, + 'sim_length': 1 * UREG.year, + 'display_frequency': 1, + 'display_fields': None, + 'write_frequency': 8760, + 'write_fields': None + } + deprecated = { + 'interval': 'interval_length', + 'sim_length': 'simulation_length' + } + + def __init__(self, simfile=None, settings=None, **kwargs): + # load simfile if it's an argument + if simfile is not None: + # read and load JSON parameter map file as "parameters" + self.param_file = simfile + with open(self.param_file, 'r') as param_file: + file_params = json.load(param_file) + #: simulation parameters from file + self.parameters = {settings: SimParameter(**params) for + settings, params in file_params.iteritems()} + # if not subclassed and metaclass skipped, then use kwargs + if not hasattr(self, 'parameters'): + #: parameter file + self.param_file = None + #: simulation parameters from keyword arguments + self.parameters = kwargs + else: + # use first settings + if settings is None: + self.settings, self.parameters = self.parameters.items()[0] + else: + #: name of sim settings used for parameters + self.settings = settings + self.parameters = self.parameters[settings] + # use any keyword arguments instead of parameters + self.parameters.update(kwargs) + # make pycharm happy - attributes assigned in loop by attrs + self.thresholds = {} + self.display_frequency = 0 + self.display_fields = {} + self.write_frequency = 0 + self.write_fields = {} + # pop deprecated attribute names + for k, v in self.deprecated.iteritems(): + val = self.parameters['extras'].pop(v, None) + # update parameters if deprecated attr used and no new attr + if val and k not in self.parameters: + self.parameters[k] = val + # Attributes + for k, v in self.attrs.iteritems(): + setattr(self, k, self.parameters.get(k, v)) + # member docstrings are in documentation since attrs are generated + if self.ID is None: + # generate id from object class name and datetime in ISO format + self.ID = id_maker(self) + if self.path is not None: + # expand environment variables, ~ and make absolute path + self.path = os.path.expandvars(os.path.expanduser(self.path)) + self.path = os.path.abspath(self.path) + # convert simulation interval to Pint Quantity + if isinstance(self.interval, basestring): + self.interval = UREG(self.interval) + elif not isinstance(self.interval, Q_): + self.interval = self.interval[0] * UREG(str(self.interval[1])) + # convert simulation length to Pint Quantity + if isinstance(self.sim_length, basestring): + self.sim_length = UREG(self.sim_length) + elif not isinstance(self.sim_length, Q_): + self.sim_length = self.sim_length[0] * UREG(str(self.sim_length[1])) + # convert simulation length to interval units to calc total intervals + sim_to_interval_units = self.sim_length.to(self.interval.units) + #: total number of intervals simulated + self.number_intervals = np.ceil(sim_to_interval_units / self.interval) + #: interval index, start at zero + self.interval_idx = 0 + #: pause status + self._ispaused = False + #: finished status + self._iscomplete = False + #: initialized status + self._isinitialized = False + #: order of calculations + self.calc_order = [] + #: command queue + self.cmd_queue = Queue.Queue() + #: index iterator + self.idx_iter = self.index_iterator() + #: data loaded status + self._is_data_loaded = False + + @property + def ispaused(self): + 
""" + Pause property, read only. True if paused. + """ + return self._ispaused + + @property + def iscomplete(self): + """ + Completion property, read only. True if finished. + """ + return self._iscomplete + + @property + def isinitialized(self): + """ + Initialization property, read only. True if initialized. + """ + return self._isinitialized + + @property + def is_data_loaded(self): + """ + Data loaded property, read only. True if data loaded. + """ + return self._is_data_loaded + + def check_data(self, data): + """ + Check if data loaded for all sources in data layer. + + :param data: data layer from model + :type data: :class:`~carousel.core.layer.Data` + :return: dictionary of data sources and objects or `None` if not loaded + """ + data_objs = { + data_src: data.objects.get(data_src) for data_src in data.layer + } + self._is_data_loaded = all(data_objs.values()) + return data_objs + + def initialize(self, calc_reg): + """ + Initialize the simulation. Organize calculations by dependency. + + :param calc_reg: Calculation registry. + :type calc_reg: + :class:`~carousel.core.calculation.CalcRegistry` + """ + self._isinitialized = True + # TODO: if calculations are edited, loaded, added, etc. then reset + self.calc_order = topological_sort(calc_reg.dependencies) + + def index_iterator(self): + """ + Generator that resumes from same index, or restarts from sent index. + """ + idx = 0 # index + while idx < self.number_intervals: + new_idx = yield idx + idx += 1 + if new_idx: + idx = new_idx - 1 + + # TODO: change start to run + + def start(self, model, progress_hook=None): + """ + Start the simulation from time zero. + + :param model: Model with layers and registries containing parameters + :type: :class:`~carousel.core.models.Model` + :param progress_hook: A function that receives either a string or a + list containing the index followed by tuples of the data or outputs + names and values specified by ``write_fields`` in the simfile. + :type progress_hook: function + + + The model registries should contain the following layer registries: + * :class:`~carousel.core.data_sources.DataRegistry`, + * :class:`~carousel.core.formulas.FormulaRegistry`, + * :class:`~carousel.core.outputs.OutputRegistry`, + * :class:`~carousel.core.calculation.CalcRegistry` + """ + # check if data loaded + data_objs = self.check_data(model.data) + if not self.is_data_loaded: + raise MissingDataError([ds for ds in data_objs if ds is None]) + # get layer registries + data_reg = model.registries['data'] + formula_reg = model.registries['formulas'] + out_reg = model.registries['outputs'] + calc_reg = model.registries['calculations'] + # initialize + if not self.isinitialized: + self.initialize(calc_reg) + # default progress hook + if not progress_hook: + progress_hook = functools.partial( + sim_progress_hook, display_header=True + ) + # start, resume or restart + if self.ispaused: + # if paused, then resume, do not resize outputs again. + self._ispaused = False # change pause state + progress_hook('resume simulation') + elif self.iscomplete: + # if complete, then restart, do not resize outputs again. 
+ self._iscomplete = False # change pause state + progress_hook('restart simulation') + self.idx_iter = self.index_iterator() + else: + # resize outputs + # assumes that self.write_frequency is immutable + # TODO: allow self.write_frequency to be changed + # only resize outputs first time simulation is started + # repeat output rows to self.write_frequency + # put initial conditions of outputs last so it's copied when + # idx == 0 + progress_hook('resize outputs') # display progress + for k in out_reg: + if out_reg.isconstant[k]: + continue + # repeat rows (axis=0) + out_reg[k] = out_reg[k].repeat(self.write_frequency, 0) + _initial_value = out_reg.initial_value[k] + if not _initial_value: + continue + if isinstance(_initial_value, basestring): + # initial value is from data registry + # assign in a scalar to a vector fills in the vector, yes! + out_reg[k][-1] = data_reg[_initial_value] + else: + out_reg[k][-1] = _initial_value * out_reg[k].units + progress_hook('start simulation') + # check and/or make Carousel_Simulations and simulation ID folders + mkdir_p(self.path) + sim_id_path = os.path.join(self.path, self.ID) + mkdir_p(sim_id_path) + # header & units for save files + data_fields = self.write_fields.get('data', []) # any data fields + out_fields = self.write_fields.get('outputs', []) # any outputs fields + save_header = tuple(data_fields + out_fields) # concatenate fields + # get units as strings from data & outputs + data_units = [str(data_reg[f].dimensionality) for f in data_fields] + out_units = [str(out_reg[f].dimensionality) for f in out_fields] + save_units = tuple(data_units + out_units) # concatenate units + # string format for header & units + save_str = ('%s' + ',%s' * (len(save_header) - 1)) + '\n' # format + save_header = (save_str * 2) % (save_header + save_units) # header + save_header = save_header[:-1] # remove trailing new line + # =================== + # Static calculations + # =================== + progress_hook('static calcs') + for calc in self.calc_order: + if not calc_reg.is_dynamic[calc]: + calc_reg.calculator[calc].calculate( + calc_reg[calc], formula_reg, data_reg, out_reg + ) + # ==================== + # Dynamic calculations + # ==================== + progress_hook('dynamic calcs') + # TODO: assumes that interval size and indices are same, but should + # interpolate for any size interval or indices + for idx_tot in self.idx_iter: + self.interval_idx = idx_tot # update simulation interval counter + idx = idx_tot % self.write_frequency + # update properties + for k, v in out_reg.isproperty.iteritems(): + # set properties from previous interval at night + if v: + out_reg[k][idx] = out_reg[k][idx - 1] + # night if any threshold exceeded + if self.thresholds: + night = not all(limits[0] < data_reg[data][idx] < limits[1] for + data, limits in self.thresholds.iteritems()) + else: + night = None + # daytime or always calculated outputs + for calc in self.calc_order: + # Determine if calculation is scheduled for this timestep + # TODO: add ``start_at`` parameter combined with ``frequency`` + freq = calc_reg.frequency[calc] + if not freq.dimensionality: + is_scheduled = (idx_tot % freq) == 0 + else: + # Frequency with units of time + is_scheduled = ((idx_tot * self.interval) % freq) == 0 + is_scheduled = ( + is_scheduled and (not night or calc_reg.always_calc[calc]) + ) + if calc_reg.is_dynamic[calc] and is_scheduled: + calc_reg.calculator[calc].calculate( + calc_reg[calc], formula_reg, data_reg, out_reg, + timestep=self.interval, idx=idx + ) + # display progress + 
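+            # progress is shown only every ``display_frequency`` intervals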
if not (idx % self.display_frequency): + progress_hook(self.format_progress(idx, data_reg, out_reg)) + # disp_head = False + # create an index for the save file, 0 if not saving + if not ((idx_tot + 1) % self.write_frequency): + savenum = (idx_tot + 1) / self.write_frequency + elif idx_tot == self.number_intervals - 1: + # save file index should be integer! + savenum = int(np.ceil((idx_tot + 1) / + float(self.write_frequency))) + else: + savenum = 0 # not saving this iteration + # save file to disk + if savenum: + savename = self.ID + '_' + str(savenum) + '.csv' # filename + savepath = os.path.join(sim_id_path, savename) # path + # create array of all data & outputs to save + save_array = self.format_write(data_reg, out_reg, idx + 1) + # save as csv using default format & turn comments off + np.savetxt(savepath, save_array, delimiter=',', + header=save_header, comments='') + try: + cmd = self.cmd_queue.get_nowait() + except Queue.Empty: + continue + if cmd == 'pause': + self._ispaused = True + return + self._iscomplete = True # change completion status + + def format_progress(self, idx, data_reg, out_reg): + data_fields = self.display_fields.get('data', []) # data fields + data_args = [(f, data_reg[f][idx]) for f in data_fields] + out_fields = self.display_fields.get('outputs', []) # outputs fields + out_args = [(f, out_reg[f][idx]) for f in out_fields] + return [idx] + data_args + out_args + + def format_write(self, data_reg, out_reg, idx=None): + data_fields = self.write_fields.get('data', []) # any data fields + data_args = [data_reg[f][:idx].reshape((-1, 1)) for f in data_fields] + out_fields = self.write_fields.get('outputs', []) # any outputs fields + out_args = [out_reg[f][:idx] for f in out_fields] + return np.concatenate(data_args + out_args, axis=1) + + def pause(self, progress_hook=None): + """ + Pause the simulation. How is this different from stopping it? Maintain + info sufficient to restart simulation. Sets ``is_paused`` to True. + Will this state allow analysis? changing parameters? What can you do + with a paused simulation? + Should be capable of saving paused simulation for loading/resuming + later, that is the main usage. EG: someone else need computer, or power + goes out, so on battery backup quickly pause simulation, and save. + Is save automatic? Should there be a parameter for auto save changed? + """ + # default progress hook + if progress_hook is None: + progress_hook = sim_progress_hook + progress_hook('simulation paused') + self.cmd_queue.put('pause') + self._ispaused = True + + def load(self, model, progress_hook=None, *args, **kwargs): + # default progress hook + if progress_hook is None: + progress_hook = sim_progress_hook + data = kwargs.get('data', {}) + if not data and args: + data = args[0] + for k, v in data.iteritems(): + progress_hook('loading simulation for %s' % k) + model.data.open(k, **v) + self.check_data(model.data) + + def run(self, model, progress_hook=None, *args, **kwargs): + # default progress hook + if progress_hook is None: + progress_hook = sim_progress_hook + progress_hook('running simulation') + self.load(model, progress_hook, *args, **kwargs) + self.start(model, progress_hook) diff --git a/carousel/docs/conf.py b/carousel/docs/conf.py index 8dbdc4e..9723071 100644 --- a/carousel/docs/conf.py +++ b/carousel/docs/conf.py @@ -53,8 +53,8 @@ master_doc = 'index' # General information about the project. 
-project = u'Carousel' -copyright = u'2016, SunPower' +project = 'Carousel' +copyright = '2016, SunPower' author = __author__ # The version info for the project you're documenting, acts as replacement for @@ -245,7 +245,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, '%s.tex' % __name__, u'%s Documentation' % __name__, + (master_doc, '%s.tex' % __name__, '%s Documentation' % __name__, __author__, 'manual'), ] @@ -275,7 +275,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - (master_doc, __name__.lower(), u'%s Documentation' % __name__, + (master_doc, __name__.lower(), '%s Documentation' % __name__, [author], 1) ] @@ -289,7 +289,7 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, __name__, u'%s Documentation' % __name__, + (master_doc, __name__, '%s Documentation' % __name__, author, __name__, 'One line description of project.', 'Miscellaneous'), ] diff --git a/carousel/docs/conf.py.bak b/carousel/docs/conf.py.bak new file mode 100644 index 0000000..8dbdc4e --- /dev/null +++ b/carousel/docs/conf.py.bak @@ -0,0 +1,314 @@ +# -*- coding: utf-8 -*- +# +# Carousel documentation build configuration file, created by +# sphinx-quickstart on Wed Feb 10 14:16:34 2016. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath(os.path.join('..', '..'))) + +from carousel import __version__, __release__, __author__, __name__ + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx', + 'sphinx.ext.coverage', + 'sphinx.ext.mathjax', + 'sphinx.ext.viewcode', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'Carousel' +copyright = u'2016, SunPower' +author = __author__ + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = __version__ +# The full version, including alpha/beta/rc tags. +release = __release__ + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + 'logo': 'sp_2014_logo_black_orange_rgb.png', + 'logo_name': True, + 'description': 'Model Simulation Framework', + 'github_user': 'SunPower', + 'github_repo': 'Carousel', + 'github_banner': True, + 'travis_button': True, + 'show_related': True +} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = 'sp_2014_logo_black_orange_rgb.png' + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +html_favicon = 'favicon.ico' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. 
+#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +html_sidebars = { + '**': [ + 'about.html', + 'navigation.html', + 'searchbox.html', + 'relations.html' + ] +} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = '%sdoc' % __name__ + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', + +# Latex figure (float) alignment +#'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, '%s.tex' % __name__, u'%s Documentation' % __name__, + __author__, 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, __name__.lower(), u'%s Documentation' % __name__, + [author], 1) +] + +# If true, show URL addresses after external links. 
+#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, __name__, u'%s Documentation' % __name__, + author, __name__, 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + 'numpy': ('https://docs.scipy.org/doc/numpy/', None), + 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None), + 'python': ('https://docs.python.org/', None)} diff --git a/carousel/tests/test_calcs.py b/carousel/tests/test_calcs.py index f792625..b2f65a7 100644 --- a/carousel/tests/test_calcs.py +++ b/carousel/tests/test_calcs.py @@ -61,7 +61,7 @@ class CalcTest2(Calc): calc_test2 = CalcTest2() ok_(isinstance(calc_test2, Calc)) - for k, v in calc_test1.parameters.iteritems(): + for k, v in calc_test1.parameters.items(): eq_(calc_test2.parameters[k], v) @@ -92,7 +92,7 @@ def test_static_calc_unc(): lat_unc = uncertainties.ufloat(latitude, np.abs(latitude * lat_unc)) lon_unc = uncertainties.ufloat(longitude, np.abs(longitude * lon_unc)) test_unc = [] # empty list to collect return values - for n in xrange(96): + for n in range(96): # Uncertainties wrapped functions must return only scalar float f_ze_unc = uncertainties.wrap( lambda lat, lon: solpos(dt[n], lat, lon)['apparent_zenith'].item() diff --git a/carousel/tests/test_calcs.py.bak b/carousel/tests/test_calcs.py.bak new file mode 100644 index 0000000..f792625 --- /dev/null +++ b/carousel/tests/test_calcs.py.bak @@ -0,0 +1,124 @@ +""" +test calculations +""" + +from nose.tools import ok_, eq_ +from carousel.core.calculations import Calc, CalcParameter +from carousel.core.calculators import Calculator +from carousel.tests import PROJ_PATH, sandia_performance_model +import os +import uncertainties +from pvlib.solarposition import get_solarposition as solpos +import logging +import numpy as np + +LOGGER = logging.getLogger(__name__) + + +def test_calc_metaclass(): + """ + Test calculation class is created with params file using metaclass + """ + + class CalcTest1(Calc): + class Meta: + calcs_file = 'utils.json' + calcs_path = os.path.join(PROJ_PATH, 'calculations') + + calc_test1 = CalcTest1() + ok_(isinstance(calc_test1, Calc)) + eq_(calc_test1.param_file, + os.path.join(PROJ_PATH, 'calculations', 'utils.json')) + + class CalcTest2(Calc): + energy = CalcParameter( + is_dynamic=False, + dependencies=["ac_power", "daterange"], + formula="f_energy", + args={"outputs": {"ac_power": "Pac", "times": "timestamps"}}, + returns=["hourly_energy", "hourly_timeseries"] + ) + monthly_rollup = CalcParameter( + is_dynamic=False, + dependencies=["energy"], + formula="f_rollup", + args={ + "data": {"freq": "MONTHLY"}, + "outputs": {"items": "hourly_energy", + "times": "hourly_timeseries"} + }, + returns=["monthly_energy"] + ) + yearly_rollup = CalcParameter( + is_dynamic=False, + dependencies=["energy"], + formula="f_rollup", + args={"data": {"freq": "YEARLY"}, + 
"outputs": {"items": "hourly_energy", + "times": "hourly_timeseries"}}, + returns=["annual_energy"] + ) + + calc_test2 = CalcTest2() + ok_(isinstance(calc_test2, Calc)) + for k, v in calc_test1.parameters.iteritems(): + eq_(calc_test2.parameters[k], v) + + +def test_static_calc_unc(): + """ + Test uncertainty propagation in static calculations using Uncertainties. + """ + + # FIXME: this shouldn't have to run a model to test the uncertainty + test_model_file = os.path.join(PROJ_PATH, 'models', + 'sandia_performance_model-Tuscon.json') + test_model = sandia_performance_model.SAPM(test_model_file) # create model + test_model.command('start') # start simulation + # get parameters from model + dt = test_model.outputs.reg['timestamps'] # timestamps + latitude = test_model.data.reg['latitude'].m # latitude [degrees] + longitude = test_model.data.reg['longitude'].m # longitude [degrees] + zenith = test_model.outputs.reg['solar_zenith'].m # zenith [degrees] + s_ze_ze = test_model.outputs.reg.variance['solar_zenith']['solar_zenith'] + azimuth = test_model.outputs.reg['solar_azimuth'].m # azimuth [degrees] + s_az_az = test_model.outputs.reg.variance['solar_azimuth']['solar_azimuth'] + # get uncertainties percentages in base units + lat_unc = test_model.data.reg.uncertainty['latitude']['latitude'] + lat_unc = lat_unc.to_base_units().m + lon_unc = test_model.data.reg.uncertainty['longitude']['longitude'] + lon_unc = lon_unc.to_base_units().m + # create ufloat Uncertainties from parameters + lat_unc = uncertainties.ufloat(latitude, np.abs(latitude * lat_unc)) + lon_unc = uncertainties.ufloat(longitude, np.abs(longitude * lon_unc)) + test_unc = [] # empty list to collect return values + for n in xrange(96): + # Uncertainties wrapped functions must return only scalar float + f_ze_unc = uncertainties.wrap( + lambda lat, lon: solpos(dt[n], lat, lon)['apparent_zenith'].item() + ) + f_az_unc = uncertainties.wrap( + lambda lat, lon: solpos(dt[n], lat, lon)['azimuth'].item() + ) + ze_unc, az_unc = f_ze_unc(lat_unc, lon_unc), f_az_unc(lat_unc, lon_unc) + LOGGER.debug( + '%s: ze = %g +/- %g%%, az = %g +/- %g%%', dt[n].isoformat(), + zenith[n], np.sqrt(s_ze_ze[n]) * 100, + azimuth[n], np.sqrt(s_az_az[n]) * 100 + ) + LOGGER.debug( + 'Uncertainties test %2d: ze = %g +/- %g%%, az = %g +/- %g%%', n, + ze_unc.n, ze_unc.s / ze_unc.n * 100, + az_unc.n, az_unc.s / az_unc.n * 100 + ) + assert np.isclose(zenith[n], ze_unc.n) + assert np.isclose(np.sqrt(s_ze_ze[n]), ze_unc.s / ze_unc.n) + assert np.isclose(azimuth[n], az_unc.n) + assert np.isclose(np.sqrt(s_az_az[n]), az_unc.s / az_unc.n) + test_unc.append({'ze': ze_unc, 'az': az_unc}) + return test_model, test_unc + + +if __name__ == '__main__': + tm, tu = test_static_calc_unc() + test_calc_metaclass() diff --git a/carousel/tests/test_data.py b/carousel/tests/test_data.py index a1dba40..1ff3878 100644 --- a/carousel/tests/test_data.py +++ b/carousel/tests/test_data.py @@ -123,7 +123,7 @@ def __prepare_data__(self): data_test2 = DataSourceTest2(TUSCON) ok_(isinstance(data_test2, DataSource)) - for k, val in data_test1.parameters.iteritems(): + for k, val in data_test1.parameters.items(): eq_(data_test2.parameters[k], val) class DataSourceTest4(DataSource): diff --git a/carousel/tests/test_data.py.bak b/carousel/tests/test_data.py.bak new file mode 100644 index 0000000..a1dba40 --- /dev/null +++ b/carousel/tests/test_data.py.bak @@ -0,0 +1,181 @@ +""" +Test data sources +""" + +from nose.tools import ok_, eq_ +from carousel.tests import logging +from carousel.core import 
UREG +from carousel.core.data_sources import DataSource, DataParameter +from carousel.core.data_readers import XLRDReader +from carousel.tests import PROJ_PATH, TESTS_DIR +import os + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.DEBUG) +TUSCON = os.path.join(PROJ_PATH, 'data', 'Tuscon.json') +XLRDREADER_TESTDATA = os.path.join(TESTS_DIR, 'xlrdreader_testdata.xlsx') + + +def test_datasource_metaclass(): + """ + Test data source meta class. + """ + + class DataSourceTest1(DataSource): + """ + Test data source with parameters in file. + """ + class Meta: + data_file = 'pvpower.json' + data_path = os.path.join(PROJ_PATH, 'data') + + def __prepare_data__(self): + pass + + data_test1 = DataSourceTest1(TUSCON) + ok_(isinstance(data_test1, DataSource)) + eq_(data_test1.param_file, os.path.join(PROJ_PATH, 'data', 'pvpower.json')) + + class DataSourceTest2(DataSource): + """ + Test data source with parameters in code. + """ + latitude = DataParameter(**{ + "description": "latitude", + "units": "degrees", + "isconstant": True, + "dtype": "float", + "uncertainty": 1.0 + }) + longitude = DataParameter(**{ + "description": "longitude", + "units": "degrees", + "isconstant": True, + "dtype": "float", + "uncertainty": 1.0 + }) + elevation = DataParameter(**{ + "description": "altitude of site above sea level", + "units": "meters", + "isconstant": True, + "dtype": "float", + "uncertainty": 1.0 + }) + timestamp_start = DataParameter(**{ + "description": "initial timestamp", + "isconstant": True, + "dtype": "datetime" + }) + timestamp_count = DataParameter(**{ + "description": "number of timesteps", + "isconstant": True, + "dtype": "int" + }) + module = DataParameter(**{ + "description": "PV module", + "isconstant": True, + "dtype": "str" + }) + inverter = DataParameter(**{ + "description": "PV inverter", + "isconstant": True, + "dtype": "str" + }) + module_database = DataParameter(**{ + "description": "module databases", + "isconstant": True, + "dtype": "str" + }) + inverter_database = DataParameter(**{ + "description": "inverter database", + "isconstant": True, + "dtype": "str" + }) + Tamb = DataParameter(**{ + "description": "average yearly ambient air temperature", + "units": "degC", + "isconstant": True, + "dtype": "float", + "uncertainty": 1.0 + }) + Uwind = DataParameter(**{ + "description": "average yearly wind speed", + "units": "m/s", + "isconstant": True, + "dtype": "float", + "uncertainty": 1.0 + }) + surface_azimuth = DataParameter(**{ + "description": "site rotation", + "units": "degrees", + "isconstant": True, + "dtype": "float", + "uncertainty": 1.0 + }) + timezone = DataParameter(**{ + "description": "timezone", + "isconstant": True, + "dtype": "str" + }) + + def __prepare_data__(self): + pass + + data_test2 = DataSourceTest2(TUSCON) + ok_(isinstance(data_test2, DataSource)) + for k, val in data_test1.parameters.iteritems(): + eq_(data_test2.parameters[k], val) + + class DataSourceTest4(DataSource): + """ + Test data source with parameters in file. 
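+        Parameters declared in the class body (``latitude`` in radians
+        below) are expected to override the same parameter loaded from
+        the file, as the assertions below check.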
+ """ + latitude = DataParameter(**{ + "description": "latitude", + "units": "radians", + "isconstant": True, + "dtype": "float", + "uncertainty": 1.0 + }) + + class Meta: + data_file = 'pvpower.json' + data_path = os.path.join(PROJ_PATH, 'data') + + def __prepare_data__(self): + pass + + + data_test4 = DataSourceTest4(TUSCON) + ok_(isinstance(data_test4, DataSource)) + eq_(data_test4['latitude'].u, UREG.radians) + eq_(data_test4.param_file, os.path.join(PROJ_PATH, 'data', 'pvpower.json')) + + +def test_xlrdreader_datasource(): + """ + Test data source with xlrd reader. + """ + + class DataSourceTest3(DataSource): + """ + Test data source with xlrd reader and params in file. + """ + class Meta: + data_reader = XLRDReader + data_file = 'xlrdreader_param.json' + data_path = TESTS_DIR + + def __prepare_data__(self): + pass + + data_test3 = DataSourceTest3(XLRDREADER_TESTDATA) + ok_(isinstance(data_test3, DataSource)) + eq_(data_test3._meta.data_reader, XLRDReader) + os.remove(os.path.join(TESTS_DIR, 'xlrdreader_testdata.xlsx.json')) + LOGGER.debug('xlrdreader_testdata.xlsx.json has been cleaned') + + +if __name__ == '__main__': + test_datasource_metaclass() + test_xlrdreader_datasource() diff --git a/carousel/tests/test_formulas.py b/carousel/tests/test_formulas.py index cfba426..988ade6 100644 --- a/carousel/tests/test_formulas.py +++ b/carousel/tests/test_formulas.py @@ -44,7 +44,7 @@ class Meta: formulas_test2 = FormulaTest2() ok_(isinstance(formulas_test2, Formula)) - for k, v in formulas_test2.parameters.iteritems(): + for k, v in formulas_test2.parameters.items(): eq_(formulas_test1.parameters[k], v) diff --git a/carousel/tests/test_formulas.py.bak b/carousel/tests/test_formulas.py.bak new file mode 100644 index 0000000..cfba426 --- /dev/null +++ b/carousel/tests/test_formulas.py.bak @@ -0,0 +1,95 @@ +""" +test formulas +""" + +from nose.tools import ok_, eq_ +import numpy as np +from carousel.core import UREG +from carousel.core.formulas import ( + Formula, NumericalExpressionImporter, FormulaParameter +) +from carousel.tests import PROJ_PATH +import os + + +def test_formulas_metaclass(): + """ + Test Formulas + """ + + class FormulaTest1(Formula): + class Meta: + formulas_file = 'utils.json' + formulas_path = os.path.join(PROJ_PATH, 'formulas') + + formulas_test1 = FormulaTest1() + ok_(isinstance(formulas_test1, Formula)) + eq_(formulas_test1.param_file, + os.path.join(PROJ_PATH, 'formulas', 'utils.json')) + + class FormulaTest2(Formula): + f_daterange = FormulaParameter() + f_energy = FormulaParameter( + args=["ac_power", "times"], + units=[["watt_hour", None], ["W", None]] + ) + f_rollup = FormulaParameter( + args=["items", "times", "freq"], + units=["=A", ["=A", None, None]] + ) + + class Meta: + module = ".utils" + package = "formulas" + + formulas_test2 = FormulaTest2() + ok_(isinstance(formulas_test2, Formula)) + for k, v in formulas_test2.parameters.iteritems(): + eq_(formulas_test1.parameters[k], v) + + +def test_numexpr_formula(): + """ + Test formulas imported using ``numexpr`` + """ + + class NumexprFormula(Formula): + f_hypotenuse = FormulaParameter( + expression='sqrt(a * a + b * b)', + args=['a', 'b'], + units=[('=A', ), ('=A', '=A')], + isconstant=[] + ) + + class Meta: + formula_importer = NumericalExpressionImporter + + numexpr_formula = NumexprFormula() + ok_(isinstance(numexpr_formula, Formula)) + unc = 0.1 # uncertainty + var = unc**2 # variance + cov = np.array([[[var, 0], [0, var]], [[var, 0], [0, var]]]) + a = [3.0, 12.0] * UREG.cm + b = [4.0, 5.0] * UREG.cm + 
f_hypotenuse = numexpr_formula.formulas['f_hypotenuse'] + c, c_unc, c_jac = f_hypotenuse(a, b, __covariance__=cov) + assert np.allclose(c.m, np.array([5.0, 13.0])) + eq_(c.u, UREG.centimeter) + # import sympy + # x, y = sympy.symbols('x, y') + # z = sympy.sqrt(x * x + y * y) + # fx, fy = z.diff(x), z.diff(y) + # fx, fy = x/sqrt(x**2 + y**2), y/sqrt(x**2 + y**2) + # fx, fy = x/z, y/z + # dz = sqrt(fx**2 * dx**2 + fy**2 * dy**2) + da, db = a.m * unc, b.m * unc + fa, fb = a.m / c.m, b.m / c.m + dc = (fa ** 2 * da ** 2) + (fb ** 2 * db ** 2) + assert np.allclose(c_unc.squeeze(), dc) + fc = np.array([fa, fb]).T + assert np.allclose(c_jac.squeeze(), fc) + return numexpr_formula + + +if __name__ == '__main__': + f = test_numexpr_formula() diff --git a/carousel/tests/test_outputs.py b/carousel/tests/test_outputs.py index f7792bd..53d63d0 100644 --- a/carousel/tests/test_outputs.py +++ b/carousel/tests/test_outputs.py @@ -36,5 +36,5 @@ class OutputTest2(Output): out_src_test2 = OutputTest2() ok_(isinstance(out_src_test2, Output)) - for k, v in out_src_test2.parameters.iteritems(): + for k, v in out_src_test2.parameters.items(): eq_(out_src_test1.parameters[k], v) diff --git a/carousel/tests/test_outputs.py.bak b/carousel/tests/test_outputs.py.bak new file mode 100644 index 0000000..f7792bd --- /dev/null +++ b/carousel/tests/test_outputs.py.bak @@ -0,0 +1,40 @@ +""" +test outputs +""" + +from nose.tools import ok_, eq_ +from carousel.core.outputs import Output +from carousel.tests import PROJ_PATH +import os + + +def test_outputs_metaclass(): + """ + Test Output Sources + """ + + class OutputTest1(Output): + class Meta: + outputs_file = 'pvpower.json' + outputs_path = os.path.join(PROJ_PATH, 'outputs') + + out_src_test1 = OutputTest1() + ok_(isinstance(out_src_test1, Output)) + eq_(out_src_test1.param_file, + os.path.join(PROJ_PATH, 'outputs', 'pvpower.json')) + + class OutputTest2(Output): + timestamps = {"isconstant": True, "size": 8761} + hourly_energy = { + "isconstant": True, + "timeseries": "hourly_timeseries", "units": "Wh", + "size": 8760 + } + hourly_timeseries = {"isconstant": True, "units": "Wh", "size": 8760} + monthly_energy = {"isconstant": True, "units": "Wh", "size": 12} + annual_energy = {"isconstant": True, "units": "Wh"} + + out_src_test2 = OutputTest2() + ok_(isinstance(out_src_test2, Output)) + for k, v in out_src_test2.parameters.iteritems(): + eq_(out_src_test1.parameters[k], v) diff --git a/carousel/tests/test_sim.py b/carousel/tests/test_sim.py index 76ca7b2..7ca6b6f 100644 --- a/carousel/tests/test_sim.py +++ b/carousel/tests/test_sim.py @@ -46,7 +46,7 @@ class Meta: data_reader = ArgumentReader def __prepare_data__(self): - keys = self.parameters.keys() + keys = list(self.parameters.keys()) for k in keys: if k.endswith('_unc'): unc = self.data.pop(k) diff --git a/carousel/tests/test_sim.py.bak b/carousel/tests/test_sim.py.bak new file mode 100644 index 0000000..76ca7b2 --- /dev/null +++ b/carousel/tests/test_sim.py.bak @@ -0,0 +1,141 @@ +""" +Simulation tests. 
+""" + +from carousel.core import logging, UREG +from carousel.core.models import Model, ModelParameter +from carousel.core.data_sources import DataParameter, DataSource +from carousel.core.formulas import FormulaParameter, Formula +from carousel.core.simulations import SimParameter, Simulation +from carousel.core.outputs import OutputParameter, Output +from carousel.core.calculations import Calc, CalcParameter +from carousel.contrib.readers import ArgumentReader +from carousel.tests import PROJ_PATH +import numpy as np +import os +import sympy + +LOGGER = logging.getLogger(__name__) + + +def test_make_sim_metaclass(): + """ + Test setting the simulation parameter file as class attributes versus + specifying the simulation parameter file in the model parameter file. + + :return: simulation + """ + + class SimTest1(Simulation): + class Meta: + sim_file = 'Tuscon.json' + sim_path = os.path.join(PROJ_PATH, 'simulations', 'Standalone') + + sim_test1 = SimTest1() + return sim_test1 + + +class PythagorasData(DataSource): + a = DataParameter(**{'units': 'cm', 'argpos': 0}) + b = DataParameter(**{'units': 'cm', 'argpos': 2}) + a_unc = DataParameter(**{'units': 'cm', 'argpos': 1}) + b_unc = DataParameter(**{'units': 'cm', 'argpos': 3}) + + class Meta: + data_cache_enabled = False + data_reader = ArgumentReader + + def __prepare_data__(self): + keys = self.parameters.keys() + for k in keys: + if k.endswith('_unc'): + unc = self.data.pop(k) + self.data_source.pop(k) + kunc = k[:-4] + v = self.data[kunc] + if not unc.dimensionless: + unc = (unc / v) + # raises dimensionality error if not dimensionless + self.uncertainty[kunc] = {kunc: unc.to(UREG.percent)} + else: + self.isconstant[k] = True + + +class PythagorasOutput(Output): + c = OutputParameter(**{'units': 'cm', 'isconstant': True}) + + +def f_hypotenuse(a, b): + a, b = np.atleast_1d(a), np.atleast_1d(b) + return np.sqrt(a * a + b * b).reshape(1, -1) + + +class PythagorasFormula(Formula): + f_hypotenuse = FormulaParameter( + args=['a', 'b'], + units=[('=A', ), ('=A', '=A')], + isconstant=[] + ) + + class Meta: + module = 'carousel.tests.test_sim' + + +class PythagorasCalc(Calc): + pythagorean_thm = CalcParameter( + is_dynamic=False, + formula='f_hypotenuse', + args={'data': {'a': 'a', 'b': 'b'}}, + returns=['c'] + ) + + +class PythagorasSim(Simulation): + settings = SimParameter( + ID='Pythagorean Theorem', + commands=['start', 'load', 'run', 'pause'], + path='~/Carousel_Tests', + thresholds=None, + interval=[1, 'hour'], + sim_length=[0, 'hour'], + write_frequency=1, + write_fields={'data': ['a', 'b'], 'outputs': ['c']}, + display_frequency=1, + display_fields={'data': ['a', 'b'], 'outputs': ['c']}, + ) + + +class PythagorasModel(Model): + data = ModelParameter(sources=[PythagorasData]) + outputs = ModelParameter(sources=[PythagorasOutput]) + formulas = ModelParameter(sources=[PythagorasFormula]) + calculations = ModelParameter(sources=[PythagorasCalc]) + simulations = ModelParameter(sources=[PythagorasSim]) + + class Meta: + modelpath = os.path.dirname(__file__) + + +def test_call_sim_with_args(): + a, a_unc, b, b_unc = 3.0, 0.1, 4.0, 0.1 + c = f_hypotenuse(a, b) + m1 = PythagorasModel() + data = {'PythagorasData': {'a': a, 'b': b, 'a_unc': a_unc, 'b_unc': b_unc}} + m1.command('run', data=data) + assert m1.registries['outputs']['c'].m == c + assert m1.registries['outputs']['c'].u == UREG.cm + x, y = sympy.symbols('x, y') + z = sympy.sqrt(x * x + y * y) + fx = sympy.lambdify((x, y), z.diff(x)) + fy = sympy.lambdify((x, y), z.diff(y)) + dz = 
np.sqrt(fx(a, b) ** 2 * a_unc ** 2 + fy(a, b) ** 2 * b_unc ** 2)
+    c_unc = c * np.sqrt(m1.registries['outputs'].variance['c']['c'])
+    LOGGER.debug('uncertainty in c is %g', c_unc)
+    assert np.isclose(dz, c_unc.item())
+    c_unc = c * m1.registries['outputs'].uncertainty['c']['c'].to('fraction')
+    assert np.isclose(dz, c_unc.m.item())
+    return m1
+
+
+if __name__ == '__main__':
+    m = test_call_sim_with_args()
diff --git a/examples/PVPower/formulas/utils.py b/examples/PVPower/formulas/utils.py
index ce7088d..e9644b1 100644
--- a/examples/PVPower/formulas/utils.py
+++ b/examples/PVPower/formulas/utils.py
@@ -69,7 +69,7 @@ def groupby_freq(items, times, freq, wkst='SU'):
     :type wkst: str
     :return: generator
     """
-    timeseries = zip(times, items)  # timeseries map of items
+    timeseries = list(zip(times, items))  # timeseries map of items
     # create a key lambda to group timeseries by
     if freq.upper() == 'DAILY':
         def key(ts_): return ts_[0].day
@@ -77,7 +77,7 @@ def key(ts_): return ts_[0].day
         weekday = getattr(rrule, wkst.upper())  # weekday start
         # generator that searches times for weekday start
         days = (day for day in times if day.weekday() == weekday.weekday)
-        day0 = days.next()  # first weekday start of all times
+        day0 = next(days)  # first weekday start of all times
 
         def key(ts_): return (ts_[0] - day0).days // 7
     else:
diff --git a/examples/PVPower/formulas/utils.py.bak b/examples/PVPower/formulas/utils.py.bak
new file mode 100644
index 0000000..ce7088d
--- /dev/null
+++ b/examples/PVPower/formulas/utils.py.bak
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+
+"""
+This module contains formulas for calculating PV power.
+"""
+
+import numpy as np
+from scipy import constants as sc_const
+import itertools
+from dateutil import rrule
+import pytz
+
+
+def f_daterange(freq, tz='UTC', *args, **kwargs):
+    """
+    Use ``dateutil.rrule`` to create a range of dates. The frequency must be a
+    string in the following list: YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY,
+    MINUTELY or SECONDLY.
+
+    See `dateutil rrule`_ documentation for more detail.
+
+    .. _dateutil rrule: https://dateutil.readthedocs.org/en/latest/rrule.html
+
+    :param freq: One of the ``dateutil.rrule`` frequencies
+    :type freq: str
+    :param tz: One of the ``pytz`` timezones, defaults to UTC
+    :type tz: str
+    :param args: start date <dtstart>, interval between each frequency <interval>,
+        max number of recurrences <count>, end date <until>
+    :param kwargs: ``dtstart``, ``interval``, ``count``, ``until``
+    :return: range of dates
+    :rtype: list
+    """
+    tz = pytz.timezone(tz)
+    freq = getattr(rrule, freq.upper())  # get frequency enumeration from rrule
+    return [tz.localize(dt) for dt in rrule.rrule(freq, *args, **kwargs)]
+
+
+def f_energy(ac_power, times):
+    """
+    Calculate the total energy accumulated from AC power at the end of each
+    timestep between the given times.
+
+    :param ac_power: AC Power [W]
+    :param times: times
+    :type times: np.datetime64[s]
+    :return: energy [W*h] and energy times
+    """
+    dt = np.diff(times)  # calculate timesteps
+    # convert timedeltas to quantities
+    dt = dt.astype('timedelta64[s]').astype('float') / sc_const.hour
+    # energy accumulated during timestep
+    energy = dt * (ac_power[:-1] + ac_power[1:]) / 2
+    return energy, times[1:]
+
+
+def groupby_freq(items, times, freq, wkst='SU'):
+    """
+    Group timeseries by frequency. The frequency must be a string in the
+    following list: YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY or
+    SECONDLY. The optional weekstart must be a string in the following list:
+    MO, TU, WE, TH, FR, SA and SU.
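+
+    For example, ``groupby_freq(energy, times, 'MONTHLY')`` would yield
+    pairs of month number and an iterator over that month's
+    ``(time, energy)`` tuples, as in :func:`f_rollup` below (an
+    illustrative call).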
+ + :param items: items in timeseries + :param times: times corresponding to items + :param freq: One of the ``dateutil.rrule`` frequency constants + :type freq: str + :param wkst: One of the ``dateutil.rrule`` weekday constants + :type wkst: str + :return: generator + """ + timeseries = zip(times, items) # timeseries map of items + # create a key lambda to group timeseries by + if freq.upper() == 'DAILY': + def key(ts_): return ts_[0].day + elif freq.upper() == 'WEEKLY': + weekday = getattr(rrule, wkst.upper()) # weekday start + # generator that searches times for weekday start + days = (day for day in times if day.weekday() == weekday.weekday) + day0 = days.next() # first weekday start of all times + + def key(ts_): return (ts_[0] - day0).days // 7 + else: + def key(ts_): return getattr(ts_[0], freq.lower()[:-2]) + for k, ts in itertools.groupby(timeseries, key): + yield k, ts + + +def f_rollup(items, times, freq): + """ + Use :func:`groupby_freq` to rollup items + + :param items: items in timeseries + :param times: times corresponding to items + :param freq: One of the ``dateutil.rrule`` frequency constants + :type freq: str + """ + rollup = [np.sum(item for __, item in ts) + for _, ts in groupby_freq(items, times, freq)] + return np.array(rollup) diff --git a/examples/PVPower/pvpower/sandia_perfmod_newstyle.py b/examples/PVPower/pvpower/sandia_perfmod_newstyle.py index 3d637ef..f840e31 100644 --- a/examples/PVPower/pvpower/sandia_perfmod_newstyle.py +++ b/examples/PVPower/pvpower/sandia_perfmod_newstyle.py @@ -45,7 +45,7 @@ def __prepare_data__(self): self.data[k] = k self.isconstant[k] = True # apply metadata - for k, v in parameters.iteritems(): + for k, v in parameters.items(): # TODO: this should be applied in data reader using _meta_names from # data registry which should use a meta class and all parameter # files should have same layout even xlrd and numpy readers, etc. diff --git a/examples/PVPower/pvpower/sandia_perfmod_newstyle.py.bak b/examples/PVPower/pvpower/sandia_perfmod_newstyle.py.bak new file mode 100644 index 0000000..3d637ef --- /dev/null +++ b/examples/PVPower/pvpower/sandia_perfmod_newstyle.py.bak @@ -0,0 +1,509 @@ +""" +New Style Carousel Sandia Performance Model +""" + +from carousel.core.data_sources import DataSourceBase, DataSource, DataParameter +from carousel.core.formulas import Formula, FormulaParameter +from carousel.core.calculations import Calc, CalcParameter +from carousel.core.calculators import Calculator +from carousel.core.outputs import Output, OutputParameter +from carousel.core.simulations import Simulation, SimParameter +from carousel.core.models import Model, ModelParameter +from carousel.core import UREG +from datetime import datetime +import pvlib +import os +from pvpower import PROJ_PATH + +SANDIA_MODULES = os.path.join(PROJ_PATH, 'Sandia Modules.csv') +CEC_MODULES = os.path.join(PROJ_PATH, 'CEC Modules.csv') +CEC_INVERTERS = os.path.join(PROJ_PATH, 'CEC Inverters.csv') + + +class PVPowerData(DataSource): + """ + Data sources for PV Power demo. 
+ """ + latitude = DataParameter(units="degrees", uncertainty=1.0) + longitude = DataParameter(units="degrees", uncertainty=1.0) + elevation = DataParameter(units="meters", uncertainty=1.0) + timestamp_start = DataParameter() + timestamp_count = DataParameter() + module = DataParameter() + inverter = DataParameter() + module_database = DataParameter() + inverter_database = DataParameter() + Tamb = DataParameter(units="degC", uncertainty=1.0) + Uwind = DataParameter(units="m/s", uncertainty=1.0) + surface_azimuth = DataParameter(units="degrees", uncertainty=1.0) + timezone = DataParameter() + + def __prepare_data__(self): + parameters = getattr(self, DataSourceBase._param_attr) + # set frequencies + for k in ('HOURLY', 'MONTHLY', 'YEARLY'): + self.data[k] = k + self.isconstant[k] = True + # apply metadata + for k, v in parameters.iteritems(): + # TODO: this should be applied in data reader using _meta_names from + # data registry which should use a meta class and all parameter + # files should have same layout even xlrd and numpy readers, etc. + self.isconstant[k] = True # set all data "isconstant" True + # uncertainty is dictionary + if 'uncertainty' in v: + self.uncertainty[k] = {k: v['uncertainty'] * UREG.percent} + # convert initial timestamp to datetime + self.data['timestamp_start'] = datetime(*self.data['timestamp_start']) + # get module and inverter databases + self.data['module_database'] = pvlib.pvsystem.retrieve_sam( + self.data['module_database'], path=SANDIA_MODULES + ) + self.data['inverter_database'] = pvlib.pvsystem.retrieve_sam( + self.data['inverter_database'], path=CEC_INVERTERS + ) + # get module and inverter + self.data['module'] = self.data['module_database'][self.data['module']] + self.data['inverter'] = ( + self.data['inverter_database'][self.data['inverter']] + ) + + +class UtilityFormulas(Formula): + """ + Formulas for PV Power demo + """ + f_daterange = FormulaParameter() + f_energy = FormulaParameter( + args=["ac_power", "times"], + units=[["watt_hour", None], ["W", None]] + ) + f_rollup = FormulaParameter( + args=["items", "times", "freq"], + units=["=A", ["=A", None, None]] + ) + + class Meta: + module = ".utils" + package = "formulas" + + +class PerformanceFormulas(Formula): + """ + Formulas for performance calcs + """ + f_ac_power = FormulaParameter( + args=["inverter", "v_mp", "p_mp"], + units=["W", [None, "V", "W"]] + ) + f_dc_power = FormulaParameter( + args=["effective_irradiance", "cell_temp", "module"], + units=[["A", "A", "V", "V", "W"], ["suns", "degC", None]] + ) + f_effective_irradiance = FormulaParameter( + args=["poa_direct", "poa_diffuse", "am_abs", "aoi", "module"], + units=["suns", ["W/m**2", "W/m**2", "dimensionless", "deg", None]] + ) + f_cell_temp = FormulaParameter( + args=["poa_global", "wind_speed", "air_temp"], + units=[["degC", "degC"], ["W/m**2", "m/s", "degC"]] + ) + f_aoi = FormulaParameter( + args=["surface_tilt", "surface_azimuth", "solar_zenith", + "solar_azimuth"], + units=["deg", ["deg", "deg", "deg", "deg"]] + ) + + class Meta: + module = ".performance" + package = "formulas" + + +class IrradianceFormulas(Formula): + """ + Formulas for irradiance calcs + """ + f_linketurbidity = FormulaParameter( + args=["times", "latitude", "longitude"], + units=["dimensionless", [None, "deg", "deg"]], + isconstant=["times"] + ) + f_clearsky = FormulaParameter( + args=["solar_zenith", "am_abs", "tl", "dni_extra", "altitude"], + units=[ + ["W/m**2", "W/m**2", "W/m**2"], + ["deg", "dimensionless", "dimensionless", "W/m**2", "m"] + ], + 
isconstant=["dni_extra"] + ) + f_solpos = FormulaParameter( + args=["times", "latitude", "longitude"], + units=[["degree", "degree"], [None, "degree", "degree"]], + isconstant=["times"] + ) + f_dni_extra = FormulaParameter(args=["times"], units=["W/m**2", [None]]) + f_airmass = FormulaParameter( + args=["solar_zenith"], units=["dimensionless", ["deg"]], + isconstant=[] + ) + f_pressure = FormulaParameter( + args=["altitude"], units=["Pa", ["m"]], isconstant=[] + ) + f_am_abs = FormulaParameter( + args=["airmass", "pressure"], + units=["dimensionless", ["dimensionless", "Pa"]], + isconstant=[] + ) + f_total_irrad = FormulaParameter( + args=[ + "times", "surface_tilt", "surface_azimuth", "solar_zenith", + "solar_azimuth", "dni", "ghi", "dhi", "dni_extra", "am_abs" + ], + units=[ + ["W/m**2", "W/m**2", "W/m**2"], + [ + None, "deg", "deg", "deg", "deg", "W/m**2", "W/m**2", + "W/m**2", + "W/m**2", "dimensionless" + ] + ], + isconstant=["times", "dni_extra"] + ) + + class Meta: + module = ".irradiance" + package = "formulas" + + +class UtilityCalcs(Calc): + """ + Calculations for PV Power demo + """ + energy = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=["ac_power", "daterange"], + formula="f_energy", + args={"outputs": {"ac_power": "Pac", "times": "timestamps"}}, + returns=["hourly_energy", "hourly_timeseries"] + ) + monthly_rollup = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=["energy"], + formula="f_rollup", + args={ + "data": {"freq": "MONTHLY"}, + "outputs": {"items": "hourly_energy", + "times": "hourly_timeseries"} + }, + returns=["monthly_energy"] + ) + yearly_rollup = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=["energy"], + formula="f_rollup", + args={"data": {"freq": "YEARLY"}, + "outputs": {"items": "hourly_energy", + "times": "hourly_timeseries"}}, + returns=["annual_energy"] + ) + + +class PerformanceCalcs(Calc): + """ + Calculations for performance + """ + aoi = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=["solpos"], + formula="f_aoi", + args={"data": {"surface_tilt": "latitude", + "surface_azimuth": "surface_azimuth"}, + "outputs": {"solar_zenith": "solar_zenith", + "solar_azimuth": "solar_azimuth"}}, + returns=["aoi"] + ) + cell_temp = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=["total_irradiance"], + formula="f_cell_temp", + args={"data": {"wind_speed": "Uwind", "air_temp": "Tamb"}, + "outputs": {"poa_global": "poa_global"}}, + returns=["Tcell", "Tmod"] + ) + effective_irradiance = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=["total_irradiance", "aoi", "abs_airmass"], + formula="f_effective_irradiance", + args={"data": {"module": "module"}, + "outputs": {"poa_direct": "poa_direct", + "poa_diffuse": "poa_diffuse", "am_abs": "am_abs", + "aoi": "aoi"}}, + returns=["Ee"] + ) + dc_power = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=["effective_irradiance", "cell_temp"], + formula="f_dc_power", + args={"data": {"module": "module"}, + "outputs": {"effective_irradiance": "Ee", "cell_temp": "Tcell"}}, + returns=["Isc", "Imp", "Voc", "Vmp", "Pmp"] + ) + ac_power = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=["dc_power"], + formula="f_ac_power", + args={"data": {"inverter": "inverter"}, + "outputs": {"v_mp": "Vmp", "p_mp": "Pmp"}}, + returns=["Pac"] + ) + + +class IrradianceCalcs(Calc): + """ + Calculations for irradiance + """ + daterange = 
CalcParameter( + is_dynamic=False, + calculator=Calculator, + formula="f_daterange", + args={"data": {"freq": "HOURLY", "dtstart": "timestamp_start", + "count": "timestamp_count", "tz": "timezone"}}, + returns=["timestamps"] + ) + solpos = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=["daterange"], + formula="f_solpos", + args={"data": {"latitude": "latitude", "longitude": "longitude"}, + "outputs": {"times": "timestamps"}}, + returns=["solar_zenith", "solar_azimuth"] + ) + extraterrestrial = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=["daterange"], + formula="f_dni_extra", + args={"outputs": {"times": "timestamps"}}, + returns=["extraterrestrial"] + ) + airmass = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=["solpos"], + formula="f_airmass", + args={"outputs": {"solar_zenith": "solar_zenith"}}, + returns=["airmass"] + ) + pressure = CalcParameter( + is_dynamic=False, + calculator=Calculator, + formula="f_pressure", + args={"data": {"altitude": "elevation"}}, + returns=["pressure"] + ) + abs_airmass = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=["airmass", "pressure"], + formula="f_am_abs", + args={"outputs": {"airmass": "airmass", "pressure": "pressure"}}, + returns=["am_abs"] + ) + linke_turbidity = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=["daterange"], + formula="f_linketurbidity", + args={"data": {"latitude": "latitude", "longitude": "longitude"}, + "outputs": {"times": "timestamps"}}, + returns=["tl"] + ) + clearsky = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=[ + "solpos", "abs_airmass", "linke_turbidity", "extraterrestrial" + ], + formula="f_clearsky", + args={"data": {"altitude": "elevation"}, + "outputs": {"solar_zenith": "solar_zenith", "am_abs": "am_abs", + "tl": "tl", "dni_extra": "extraterrestrial"}}, + returns=["dni", "ghi", "dhi"] + ) + total_irradiance = CalcParameter( + is_dynamic=False, + calculator=Calculator, + dependencies=[ + "daterange", "solpos", "clearsky", "extraterrestrial", + "abs_airmass" + ], + formula="f_total_irrad", + args={ + "data": { + "surface_tilt": "latitude", "surface_azimuth": "surface_azimuth" + }, + "outputs": { + "times": "timestamps", "solar_zenith": "solar_zenith", + "solar_azimuth": "solar_azimuth", "dni": "dni", + "ghi": "ghi", "dhi": "dhi", "dni_extra": "extraterrestrial", + "am_abs": "am_abs" + } + }, + returns=["poa_global", "poa_direct", "poa_diffuse"] + ) + + +class PVPowerOutputs(Output): + """ + Outputs for PV Power demo + """ + timestamps = OutputParameter(isconstant=True, size=8761) + hourly_energy = OutputParameter( + isconstant=True, timeseries="hourly_timeseries", units="Wh", size=8760 + ) + hourly_timeseries = OutputParameter(isconstant=True, units="Wh", size=8760) + monthly_energy = OutputParameter(isconstant=True, units="Wh", size=12) + annual_energy = OutputParameter(isconstant=True, units="Wh") + + +class PerformanceOutputs(Output): + """ + Performance outputs for PV Power demo + """ + Pac = OutputParameter( + isconstant=True, timeseries="timestamps", units="W", size=8761 + ) + Isc = OutputParameter( + isconstant=True, timeseries="timestamps", units="A", size=8761 + ) + Imp = OutputParameter( + isconstant=True, timeseries="timestamps", units="A", size=8761 + ) + Voc = OutputParameter( + isconstant=True, timeseries="timestamps", units="V", size=8761 + ) + Vmp = OutputParameter( + isconstant=True, timeseries="timestamps", units="V", 
size=8761 + ) + Pmp = OutputParameter( + isconstant=True, timeseries="timestamps", units="W", size=8761 + ) + Ee = OutputParameter( + isconstant=True, timeseries="timestamps", units="dimensionless", + size=8761 + ) + Tcell = OutputParameter( + isconstant=True, timeseries="timestamps", units="degC", size=8761 + ) + Tmod = OutputParameter( + isconstant=True, timeseries="timestamps", units="degC", size=8761 + ) + + +class IrradianceOutputs(Output): + """ + Irradiance outputs for PV Power demo + """ + tl = OutputParameter( + isconstant=True, timeseries="timestamps", units="dimensionless", + size=8761 + ) + poa_global = OutputParameter( + isconstant=True, timeseries="timestamps", units="W/m**2", size=8761 + ) + poa_direct = OutputParameter( + isconstant=True, timeseries="timestamps", units="W/m**2", size=8761 + ) + poa_diffuse = OutputParameter( + isconstant=True, timeseries="timestamps", units="W/m**2", size=8761 + ) + aoi = OutputParameter( + isconstant=True, timeseries="timestamps", units="deg", size=8761 + ) + solar_zenith = OutputParameter( + isconstant=True, timeseries="timestamps", units="deg", size=8761 + ) + solar_azimuth = OutputParameter( + isconstant=True, timeseries="timestamps", units="deg", size=8761 + ) + pressure = OutputParameter( + isconstant=True, timeseries="timestamps", units="Pa", size=1 + ) + airmass = OutputParameter( + isconstant=True, timeseries="timestamps", units="dimensionless", + size=8761 + ) + am_abs = OutputParameter( + isconstant=True, timeseries="timestamps", units="dimensionless", + size=8761 + ) + extraterrestrial = OutputParameter( + isconstant=True, timeseries="timestamps", units="W/m**2", size=8761 + ) + dni = OutputParameter( + isconstant=True, timeseries="timestamps", units="W/m**2", size=8761 + ) + dhi = OutputParameter( + isconstant=True, timeseries="timestamps", units="W/m**2", size=8761 + ) + ghi = OutputParameter( + isconstant=True, timeseries="timestamps", units="W/m**2", size=8761 + ) + + +class PVPowerSim(Simulation): + """ + PV Power Demo Simulations + """ + settings = SimParameter( + ID="Tuscon_SAPM", + path="~/Carousel_Simulations", + thresholds=None, + interval=[1, "hour"], + sim_length=[0, "hours"], + write_frequency=0, + write_fields={ + "data": ["latitude", "longitude", "Tamb", "Uwind"], + "outputs": ["monthly_energy", "annual_energy"] + }, + display_frequency=12, + display_fields={ + "data": ["latitude", "longitude", "Tamb", "Uwind"], + "outputs": ["monthly_energy", "annual_energy"] + }, + commands=['start', 'pause'] + ) + + +class NewSAPM(Model): + """ + PV Power Demo model + """ + data = ModelParameter( + layer='Data', sources=[(PVPowerData, {'filename': 'Tuscon.json'})] + ) + outputs = ModelParameter( + layer='Outputs', + sources=[PVPowerOutputs, PerformanceOutputs, IrradianceOutputs] + ) + formulas = ModelParameter( + layer='Formulas', + sources=[UtilityFormulas, PerformanceFormulas, IrradianceFormulas] + ) + calculations = ModelParameter( + layer='Calculations', + sources=[UtilityCalcs, PerformanceCalcs, IrradianceCalcs] + ) + simulations = ModelParameter(layer='Simulations', sources=[PVPowerSim]) + + class Meta: + modelpath = PROJ_PATH # folder containing project, not model diff --git a/examples/PVPower/pvpower/sandia_performance_model.py b/examples/PVPower/pvpower/sandia_performance_model.py index de5f2e7..3191745 100644 --- a/examples/PVPower/pvpower/sandia_performance_model.py +++ b/examples/PVPower/pvpower/sandia_performance_model.py @@ -38,7 +38,7 @@ def __prepare_data__(self): self.data[k] = k self.isconstant[k] = True # 
apply metadata - for k, v in parameters.iteritems(): + for k, v in parameters.items(): # TODO: this should be applied in data reader using _meta_names from # data registry which should use a meta class and all parameter # files should have same layout even xlrd and numpy readers, etc. diff --git a/examples/PVPower/pvpower/sandia_performance_model.py.bak b/examples/PVPower/pvpower/sandia_performance_model.py.bak new file mode 100644 index 0000000..de5f2e7 --- /dev/null +++ b/examples/PVPower/pvpower/sandia_performance_model.py.bak @@ -0,0 +1,167 @@ +""" +Sandia Performance Model +""" + +from carousel.core.data_sources import DataSource +from carousel.core.formulas import Formula +from carousel.core.calculations import Calc +from carousel.core.calculators import Calculator +from carousel.core.outputs import Output +from carousel.core.simulations import Simulation +from carousel.core.models import Model +from carousel.core import UREG +from datetime import datetime +import pvlib +import os +from pvpower import PROJ_PATH + +CALC_PATH = os.path.join(PROJ_PATH, 'calculations') +FORMULA_PATH = os.path.join(PROJ_PATH, 'formulas') +DATA_PATH = os.path.join(PROJ_PATH, 'data') +SANDIA_MODULES = os.path.join(PROJ_PATH, 'Sandia Modules.csv') +CEC_MODULES = os.path.join(PROJ_PATH, 'CEC Modules.csv') +CEC_INVERTERS = os.path.join(PROJ_PATH, 'CEC Inverters.csv') + + +class PVPowerData(DataSource): + """ + Data sources for PV Power demo. + """ + class Meta: + data_file = 'pvpower.json' + data_path = DATA_PATH + + def __prepare_data__(self): + parameters = getattr(self, 'parameters') + # set frequencies + for k in ('HOURLY', 'MONTHLY', 'YEARLY'): + self.data[k] = k + self.isconstant[k] = True + # apply metadata + for k, v in parameters.iteritems(): + # TODO: this should be applied in data reader using _meta_names from + # data registry which should use a meta class and all parameter + # files should have same layout even xlrd and numpy readers, etc. 
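+            # each parameter value v is a dictionary loaded from the JSON
+            # parameter file; the lines below copy its "isconstant" flag
+            # into the registry and convert a bare "uncertainty" number
+            # into a mapping of percent quantities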
+ if 'isconstant' in v: + self.isconstant[k] = v['isconstant'] + # uncertainty is dictionary + if 'uncertainty' in v: + self.uncertainty[k] = {k: v['uncertainty'] * UREG.percent} + # convert initial timestamp to datetime + self.data['timestamp_start'] = datetime(*self.data['timestamp_start']) + # get module and inverter databases + self.data['module_database'] = pvlib.pvsystem.retrieve_sam( + self.data['module_database'], path=SANDIA_MODULES + ) + self.data['inverter_database'] = pvlib.pvsystem.retrieve_sam( + self.data['inverter_database'], path=CEC_INVERTERS + ) + # get module and inverter + self.data['module'] = self.data['module_database'][self.data['module']] + self.data['inverter'] = ( + self.data['inverter_database'][self.data['inverter']] + ) + + +class UtilityFormulas(Formula): + """ + Formulas for PV Power demo + """ + class Meta: + formulas_file = 'utils.json' + formulas_path = FORMULA_PATH + + +class PerformanceFormulas(Formula): + """ + Formulas for performance calcs + """ + class Meta: + formulas_file = 'performance.json' + formulas_path = FORMULA_PATH + + +class IrradianceFormulas(Formula): + """ + Formulas for irradiance calcs + """ + class Meta: + formulas_file = 'irradiance.json' + formulas_path = FORMULA_PATH + + +class UtilityCalcs(Calc): + """ + Calculations for PV Power demo + """ + class Meta: + calcs_file = 'utils.json' + calcs_path = CALC_PATH + dependencies = ['PerformanceCalcs'] + calculator = Calculator + is_dynamic = False + + +class PerformanceCalcs(Calc): + """ + Calculations for performance + """ + class Meta: + calcs_file = 'performance.json' + calcs_path = CALC_PATH + dependencies = ['IrradianceCalcs'] + calculator = Calculator + is_dynamic = False + + +class IrradianceCalcs(Calc): + """ + Calculations for irradiance + """ + class Meta: + calcs_file = 'irradiance.json' + calcs_path = CALC_PATH + calculator = Calculator + is_dynamic = False + + +class PVPowerOutputs(Output): + """ + Outputs for PV Power demo + """ + class Meta: + outputs_file = 'pvpower.json' + outputs_path = os.path.join(PROJ_PATH, 'outputs') + + +class PerformanceOutputs(Output): + """ + Performance outputs for PV Power demo + """ + class Meta: + outputs_file = 'performance.json' + outputs_path = os.path.join(PROJ_PATH, 'outputs') + + +class IrradianceOutputs(Output): + """ + Irradiance outputs for PV Power demo + """ + class Meta: + outputs_file = 'irradiance.json' + outputs_path = os.path.join(PROJ_PATH, 'outputs') + + +class Standalone(Simulation): + """ + PV Power Demo Simulations + """ + pass + + +class SAPM(Model): + """ + PV Power Demo model + """ + class Meta: + modelpath = PROJ_PATH # folder containing project, not model diff --git a/examples/PVPower/pvpower/tests/test_pvpower.py b/examples/PVPower/pvpower/tests/test_pvpower.py index 0c7e79e..f15b49d 100644 --- a/examples/PVPower/pvpower/tests/test_pvpower.py +++ b/examples/PVPower/pvpower/tests/test_pvpower.py @@ -44,7 +44,7 @@ def test_daterange(): 'HOURLY', TZ, dtstart=DTSTART, count=test_range ) dtstart_local = PST.localize(DTSTART) - for hour in xrange(test_range): + for hour in range(test_range): assert dates[hour] == dtstart_local + timedelta(hours=hour) assert dates[hour].tzinfo.zone == TZ return dates diff --git a/examples/PVPower/pvpower/tests/test_pvpower.py.bak b/examples/PVPower/pvpower/tests/test_pvpower.py.bak new file mode 100644 index 0000000..0c7e79e --- /dev/null +++ b/examples/PVPower/pvpower/tests/test_pvpower.py.bak @@ -0,0 +1,114 @@ +""" +Tests for pvpower formulas +""" + +from datetime import datetime, 
timedelta +import numpy as np +import pytz +from carousel.core import UREG, logging, models +from pvpower.sandia_performance_model import ( + UtilityFormulas, IrradianceFormulas +) +from pvpower import sandia_performance_model, sandia_perfmod_newstyle +from pvpower.tests import MODEL_PATH +import os + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.DEBUG) +TZ = 'US/Pacific' +PST = pytz.timezone(TZ) +UTIL_FORMULAS = UtilityFormulas() +IRRAD_FORMULAS = IrradianceFormulas() +DTSTART = datetime(2007, 1, 1, 0, 0, 0) +MONTHLY_ENERGY = [186000.0, 168000.0, 186000.0, 180000.0, 186000.0, 180000.0, + 186000.0, 186000.0, 180000.0, 186000.0, 180000.0, 186000.0] +ZENITH = [ + 84.67227032399542, 75.69700469024768, 68.32442897476993, 63.22974106430276, + 61.01563669117582, 62.00067006350331, 66.0382089666321, 72.60444584135432, + 81.04253480488877 +] +AZIMUTH = [ + 124.64915808, 135.21222923, 147.46982483, 161.53685504, 176.95197338, + 192.61960738, 207.30533949, 220.27975359, 231.46642409 +] +OLD_MODEL = os.path.join(MODEL_PATH, 'sandia_performance_model-Tuscon.json') +ANNUAL_ENERGY = np.array(479083.75869040738) + + +def test_daterange(): + """ + Test date range. + """ + test_range = 12 + dates = UTIL_FORMULAS['f_daterange']( + 'HOURLY', TZ, dtstart=DTSTART, count=test_range + ) + dtstart_local = PST.localize(DTSTART) + for hour in xrange(test_range): + assert dates[hour] == dtstart_local + timedelta(hours=hour) + assert dates[hour].tzinfo.zone == TZ + return dates + + +def test_solarposition(): + """ + Test solar position algorithm. + """ + lat, lon = 38.0 * UREG.degrees, -122.0 * UREG.degrees + times = UTIL_FORMULAS['f_daterange']( + 'HOURLY', TZ, dtstart=(DTSTART + timedelta(hours=8)), count=9 + ) + cov = np.array([[0.0001, 0], [0, 0.0001]]) + solpos = IRRAD_FORMULAS['f_solpos'](times, lat, lon, __covariance__=cov) + assert len(solpos) == 4 + ze, az, cov, jac = solpos + assert ze.u == UREG.degree + assert az.u == UREG.degree + assert np.allclose(ze.m, ZENITH) + assert np.allclose(az.m, AZIMUTH) + return solpos + + +def test_rollup(): + """ + Test rollup. + """ + dates = UTIL_FORMULAS['f_daterange']('HOURLY', dtstart=DTSTART, count=8761) + ac_power = 1000. * np.sin(np.arange(12) * np.pi / 12.0) ** 2 + ac_power = np.pad(ac_power, [6, 6], 'constant') + ac_power = np.append(np.tile(ac_power, (365,)), [0]) * UREG.watt + energy, energy_times = UTIL_FORMULAS['f_energy'](ac_power, dates) + assert energy.units == UREG.Wh + monthly_energy = UTIL_FORMULAS['f_rollup'](energy, energy_times, 'MONTHLY') + assert np.allclose(monthly_energy[:12], MONTHLY_ENERGY) + return dates, ac_power, energy, energy_times, monthly_energy + + +def test_new_style(): + """ + Test new style Carousel model. + """ + m = sandia_perfmod_newstyle.NewSAPM() + assert isinstance(m, models.Model) + m.command('start') + annual_energy = np.sum(m.registries['outputs']['annual_energy'].m) + assert np.isclose(annual_energy, ANNUAL_ENERGY) + return m + + +def test_old_style(): + """ + Test old style Carousel model. 
+ """ + m = sandia_performance_model.SAPM(OLD_MODEL) + assert isinstance(m, models.Model) + m.command('start') + annual_energy = np.sum(m.registries['outputs']['annual_energy'].m) + assert np.isclose(annual_energy, ANNUAL_ENERGY) + return m + + +if __name__ == "__main__": + results = test_rollup() + m_old = test_old_style() + m_new = test_new_style() From 2efbd9d181dd70f7c43e209822814a833062c78f Mon Sep 17 00:00:00 2001 From: Python3pkg Date: Thu, 18 May 2017 10:44:51 -0700 Subject: [PATCH 2/3] Remove .bak --- carousel/__init__.py.bak | 48 -- carousel/contrib/readers.py.bak | 158 ---- .../contrib/tests/test_data_readers.py.bak | 345 -------- carousel/core/__init__.py.bak | 374 -------- carousel/core/calculations.py.bak | 120 --- carousel/core/calculators.py.bak | 258 ------ carousel/core/data_readers.py.bak | 805 ------------------ carousel/core/data_sources.py.bak | 303 ------- carousel/core/formulas.py.bak | 300 ------- carousel/core/layers.py.bak | 408 --------- carousel/core/models.py.bak | 344 -------- carousel/core/outputs.py.bak | 141 --- carousel/core/simulations.py.bak | 555 ------------ carousel/docs/conf.py.bak | 314 ------- carousel/tests/test_calcs.py.bak | 124 --- carousel/tests/test_data.py.bak | 181 ---- carousel/tests/test_formulas.py.bak | 95 --- carousel/tests/test_outputs.py.bak | 40 - carousel/tests/test_sim.py.bak | 141 --- examples/PVPower/formulas/utils.py.bak | 100 --- .../pvpower/sandia_perfmod_newstyle.py.bak | 509 ----------- .../pvpower/sandia_performance_model.py.bak | 167 ---- .../PVPower/pvpower/tests/test_pvpower.py.bak | 114 --- 23 files changed, 5944 deletions(-) delete mode 100644 carousel/__init__.py.bak delete mode 100644 carousel/contrib/readers.py.bak delete mode 100644 carousel/contrib/tests/test_data_readers.py.bak delete mode 100644 carousel/core/__init__.py.bak delete mode 100644 carousel/core/calculations.py.bak delete mode 100644 carousel/core/calculators.py.bak delete mode 100644 carousel/core/data_readers.py.bak delete mode 100644 carousel/core/data_sources.py.bak delete mode 100644 carousel/core/formulas.py.bak delete mode 100644 carousel/core/layers.py.bak delete mode 100644 carousel/core/models.py.bak delete mode 100644 carousel/core/outputs.py.bak delete mode 100644 carousel/core/simulations.py.bak delete mode 100644 carousel/docs/conf.py.bak delete mode 100644 carousel/tests/test_calcs.py.bak delete mode 100644 carousel/tests/test_data.py.bak delete mode 100644 carousel/tests/test_formulas.py.bak delete mode 100644 carousel/tests/test_outputs.py.bak delete mode 100644 carousel/tests/test_sim.py.bak delete mode 100644 examples/PVPower/formulas/utils.py.bak delete mode 100644 examples/PVPower/pvpower/sandia_perfmod_newstyle.py.bak delete mode 100644 examples/PVPower/pvpower/sandia_performance_model.py.bak delete mode 100644 examples/PVPower/pvpower/tests/test_pvpower.py.bak diff --git a/carousel/__init__.py.bak b/carousel/__init__.py.bak deleted file mode 100644 index 8afd247..0000000 --- a/carousel/__init__.py.bak +++ /dev/null @@ -1,48 +0,0 @@ -""" -Carousel Python Model Simulation Framework - -Mark Mikofski (c) 2015 -""" - -import os -import importlib - -# try to import Dulwich or create dummies -try: - from dulwich.contrib.release_robot import get_current_version - from dulwich.repo import NotGitRepository -except ImportError: - NotGitRepository = NotImplementedError - - def get_current_version(*args, **kwargs): - raise NotGitRepository - -# Dulwich Release Robot -BASEDIR = os.path.dirname(__file__) # this directory -PROJDIR = 
os.path.dirname(BASEDIR) -VER_FILE = 'version' # name of file to store version -# use release robot to try to get current Git tag -try: - GIT_TAG = get_current_version(PROJDIR) -except NotGitRepository: - GIT_TAG = None -# check version file -try: - version = importlib.import_module('%s.%s' % (__name__, VER_FILE)) -except ImportError: - VERSION = None -else: - VERSION = version.VERSION -# update version file if it differs from Git tag -if GIT_TAG is not None and VERSION != GIT_TAG: - with open(os.path.join(BASEDIR, VER_FILE + '.py'), 'w') as vf: - vf.write('VERSION = "%s"\n' % GIT_TAG) -else: - GIT_TAG = VERSION # if Git tag is none use version file -VERSION = GIT_TAG # version - -__author__ = u'Mark Mikofski' -__email__ = u'mark.mikofski@sunpowercorp.com' -__url__ = u'https://github.com/SunPower/Carousel' -__version__ = VERSION -__release__ = u'Caramel Corn' diff --git a/carousel/contrib/readers.py.bak b/carousel/contrib/readers.py.bak deleted file mode 100644 index 20c0f1d..0000000 --- a/carousel/contrib/readers.py.bak +++ /dev/null @@ -1,158 +0,0 @@ -""" -Custom data readers including :class:`carousel.contrib.readers.ArgumentReader`, -:class:`carousel.contrib.readers.DjangoModelReader` and -:class:`carousel.contrib.readers.HDF5Reader`. -""" - -import numpy as np -import h5py -from carousel.core.data_readers import DataReader -from carousel.core.data_sources import DataParameter -from carousel.core import Q_ -import logging - -LOGGER = logging.getLogger(__name__) -LOGGER.setLevel(logging.DEBUG) - - -def copy_model_instance(obj): - """ - Copy Django model instance as a dictionary excluding automatically created - fields like an auto-generated sequence as a primary key or an auto-created - many-to-one reverse relation. - - :param obj: Django model object - :return: copy of model instance as dictionary - """ - meta = getattr(obj, '_meta') # make pycharm happy - # dictionary of model values excluding auto created and related fields - return {f.name: getattr(obj, f.name) - for f in meta.get_fields(include_parents=False) - if not f.auto_created} - - -# TODO: make parameters consistent for all readers -# TODO: parameters set by attributes in data source model fields -# EG: ghi = FloatField('GHI', units='W/m**2') -# EG: solar_azimuth = FloatField('solar azimuth', units='degrees') -# TODO: some parameters set in class Meta -# EG: class Meta: args = ['GHI', 'azimuth'] - -class ArgumentReader(DataReader): - """ - Read arguments passed directly to a simulation. - - The argument parameters dictionary should have two keys: `args` and `kwargs` - which consist of the names and attributes of the positional and keyword - arguments respectively. For example:: - - { - 'GHI': {'units': 'W/m**2', 'isconstant': False, 'argpos': 0}, - 'azimuth': {'units': 'degrees', 'isconstant': False, 'argpos': 1}, - 'DNI': {'units': 'W/m**2', 'isconstant': False}, - 'zenith': {'units': 'degrees', 'isconstant': False} - } - - """ - #: True if reader accepts ``filename`` argument - is_file_reader = False # not a file reader - - def load_data(self, *args, **kwargs): - """ - Collects positional and keyword arguments into `data` and applies units. 
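-
-        For example, given the parameter schema in the class docstring,
-        ``load_data(ghi, az, DNI=dni, zenith=zen)`` would map the two
-        positional values to ``GHI`` and ``azimuth`` by their ``argpos``
-        and merge the keyword values (an illustrative call).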
- - :return: data - """ - # get positional argument names from parameters and apply them to args - # update data with additional kwargs - argpos = { - v['extras']['argpos']: k for k, v in self.parameters.iteritems() - if 'argpos' in v['extras'] - } - data = dict( - {argpos[n]: a for n, a in enumerate(args)}, **kwargs - ) - return self.apply_units_to_cache(data) - - def apply_units_to_cache(self, data): - """ - Applies units to data when a proxy reader is used. For example if the - data is cached as JSON and retrieved using the - :class:`~carousel.core.data_readers.JSONReader`, then units can be - applied from the original parameter schema. - - :param data: Data read by proxy reader. - :return: data with units applied - """ - # if units key exists then apply - for k, v in self.parameters.iteritems(): - if v and v.get('units'): - data[k] = Q_(data[k], v.get('units')) - return data - - -class DjangoModelReader(ArgumentReader): - """ - Reads arguments that are Django objects or lists of objects. - """ - def __init__(self, parameters=None, meta=None): - #: Django model - self.model = meta.model - model_meta = getattr(self.model, '_meta') # make pycharm happy - # model fields excluding AutoFields and related fields like one-to-many - all_model_fields = [ - f for f in model_meta.get_fields(include_parents=False) - if not f.auto_created - ] - all_field_names = [f.name for f in all_model_fields] # field names - # use all fields if no parameters given - if parameters is None: - parameters = DataParameter.fromkeys( - all_field_names, {} - ) - fields = getattr(meta, 'fields', all_field_names) # specified fields - LOGGER.debug('fields:\n%r', fields) - exclude = getattr(meta, 'exclude', []) # specifically excluded fields - for f in all_model_fields: - # skip any fields not specified in data source - if f.name not in fields or f.name in exclude: - LOGGER.debug('skipping %s', f.name) - continue - # add field to parameters or update parameters with field type - param_dict = {'ftype': f.get_internal_type()} - if f.name in parameters: - parameters[f.name]['extras'].update(param_dict) - else: - parameters[f.name] = DataParameter(**param_dict) - super(DjangoModelReader, self).__init__(parameters, meta) - - def load_data(self, model_instance, *args, **kwargs): - """ - Apply units to model. - :return: data - """ - model_dict = copy_model_instance(model_instance) - return super(DjangoModelReader, self).load_data(**model_dict) - - -class HDF5Reader(ArgumentReader): - """ - Reads data from an HDF5 file - """ - #: True if reader accepts ``filename`` argument - is_file_reader = True # is a file reader - - def load_data(self, h5file, *args, **kwargs): - with h5py.File(h5file) as h5f: - h5data = dict.fromkeys(self.parameters) - for param, attrs in self.parameters.iteritems(): - LOGGER.debug('parameter:\n%r', param) - node = attrs['extras']['node'] # full name of node - # composite datatype member - member = attrs['extras'].get('member') - if member is not None: - # if node is a table then get column/field/description - h5data[param] = np.asarray(h5f[node][member]) # copy member - else: - h5data[param] = np.asarray(h5f[node]) # copy array - return super(HDF5Reader, self).load_data(**h5data) diff --git a/carousel/contrib/tests/test_data_readers.py.bak b/carousel/contrib/tests/test_data_readers.py.bak deleted file mode 100644 index 86f6dec..0000000 --- a/carousel/contrib/tests/test_data_readers.py.bak +++ /dev/null @@ -1,345 +0,0 @@ -""" -Test contrib data readers. 
-""" - -from carousel.core.data_readers import DataReader -from carousel.contrib.readers import ( - ArgumentReader, DjangoModelReader, HDF5Reader -) -from carousel.core.data_sources import DataSourceBase, DataSource, DataParameter -from datetime import datetime -from carousel.core import UREG -from django.db import models -import django -from django.apps import AppConfig -from django.conf import settings -import sys -import mock -import logging -import h5py -import os -import numpy as np - -DIRNAME = os.path.dirname(__file__) -LOGGER = logging.getLogger(__name__) -LOGGER.setLevel(logging.DEBUG) -# test data -TAIR = 25.0 -LAT = 38.0 -LON = -122.0 -TZ = -8.0 -H5TEST1 = os.path.join(DIRNAME, 'test1.h5') -H5TEST2 = os.path.join(DIRNAME, 'test2.h5') -H5DTYPE = [('GlobalHorizontalRadiation', ' %(asctime)s %(funcName)s:%(lineno)d\n> ' + - '\n'.join(logging.BASIC_FORMAT.rsplit(':', 1))) -logging.basicConfig(datefmt=LOG_DATEFMT, format=LOG_FORMAT) -LOGGER = logging.getLogger(__name__) -LOGGER.setLevel(logging.DEBUG) - -# unit registry, quantity constructor and extra units registry definitions -UREG = pint.UnitRegistry() # registry of units -Q_ = UREG.Quantity # quantity constructor for ambiguous quantities like degC -UREG.define('fraction = []') # define new dimensionless base unit for percents -UREG.define('percent = fraction / 100.0 = pct') # can't use "%" only ascii -UREG.define('suns = []') # dimensionless unit equivalent to 1000.0 [W/m/m] - -# define PV solar context -_PV = pint.Context('pv') -# define transformation of suns to power flux and vice versa -E0 = 1000.0 * UREG.W / UREG.m / UREG.m # 1 sun -_PV.add_transformation('[]', '[power] / [area]', lambda ureg, x: x * E0) -_PV.add_transformation('[power] / [area]', '[]', lambda ureg, x: x / E0) -UREG.add_context(_PV) - - -def _listify(x): - """ - If x is not a list, make it a list. - """ - return list(x) if isinstance(x, (list, tuple)) else [x] - - -class Registry(dict): - """ - Base class for a registry. - - The register method can be used to add new keys to the registry only if - they don't already exist. A registry can also have meta data associated - with subsets of the registered keys. To enforce rules on meta when the keys - are registered, override the register method and raise exceptions before - calling the :func:`super` built-in function. - - By default there are no meta attributes, only the register method. - To set meta attributes, in a subclass, set the ``meta_names`` class - attribute in the subclass:: - - class MyRegistry(Registry): - meta_names = ['meta1', 'meta2', ...] - - The ``Registry`` superclass will check that the meta names are not already - attributes and then set instance attributes as empty dictionaries in the - subclass. To document them, use the class docstring or document them in the - documentation API. - """ - meta_names = [] - - def __init__(self): - self.meta_names = _listify(self.meta_names) # convert to list - for m in self.meta_names: - # check for m in cls and bases - if m in dir(Registry): - msg = ('Class %s already has %s member.' % - (self.__class__.__name__, m)) - raise AttributeError(msg) - setattr(self, m, {}) # create instance attribute and set to dict() - super(Registry, self).__init__() - - def register(self, newitems, *args, **kwargs): - """ - Register newitems in registry. - - :param newitems: New items to add to registry. When registering new - items, keys are not allowed to override existing keys in the - registry. 
- :type newitems: mapping - :param args: Positional arguments with meta data corresponding to order - of meta names class attributes - :param kwargs: Maps of corresponding meta for new keys. Each set of - meta keys must be a subset of the new item keys. - :raises: - :exc:`~carousel.core.exceptions.DuplicateRegItemError`, - :exc:`~carousel.core.exceptions.MismatchRegMetaKeysError` - """ - newkeys = newitems.viewkeys() # set of the new item keys - if any(self.viewkeys() & newkeys): # duplicates - raise DuplicateRegItemError(self.viewkeys() & newkeys) - self.update(newitems) # register new item - # update meta fields - kwargs.update(zip(self.meta_names, args)) - for k, v in kwargs.iteritems(): - meta = getattr(self, k) # get the meta attribute - if v: - if not v.viewkeys() <= newkeys: - raise MismatchRegMetaKeysError(newkeys - v.viewkeys()) - meta.update(v) # register meta - - def unregister(self, items): - """ - Remove items from registry. - - :param items: - """ - items = _listify(items) - # get all members of Registry except private, special or class - meta_names = (m for m in vars(self).iterkeys() - if (not m.startswith('_') and m not in dir(Registry))) - # check that meta names matches - # FIXME: this is so lame. replace this with something more robust - for m in meta_names: - if m not in self.meta_names: - raise AttributeError('Meta name %s not listed.') - # pop items from Registry and from meta - for it in items: - if it in self: - self.pop(it) - for m in (getattr(self, m_) for m_ in self.meta_names): - if it in m: - m.pop(it) - - -# decorator to use with formulas to convert argument units -def convert_args(test_fcn, *test_args): - """ - Decorator to be using in formulas to convert ``test_args`` depending on - the ``test_fcn``. - - :param test_fcn: A test function that converts arguments. - :type test_fcn: function - :param test_args: Names of args to convert using ``test_fcn``. - :type test_args: str - - The following test functions are available. - * :func:`dimensionless_to_index` - - Example: Convert ``dawn_idx`` and ``eve_idx`` to indices:: - - @convert_args(dimensionless_to_index, 'dawn_idx', 'eve_idx') - def f_max_T(Tcell24, dawn_idx, eve_idx): - idx = dawn_idx + np.argmax(Tcell24[dawn_idx:eve_idx]) - return Tcell24[idx], idx - """ - def wrapper(origfcn): - @functools.wraps(origfcn) - def newfcn(*args, **kwargs): - argspec = getargspec(origfcn) # use ``inspect`` to get arg names - kwargs.update(zip(argspec.args, args)) # convert args to kw - # loop over test args - for a in test_args: - # convert a if it's in args - if a in argspec.args: - kwargs[a] = test_fcn(kwargs[a]) # update kwargs - # call original function with converted args - return origfcn(**kwargs) - # return wrapped function - return newfcn - # return the wrapper function that consumes the original function - return wrapper - -# NOTE: Preferred way to compare units is with dimensionality -# EG: (25 * UREG.degC).dimensionality == UREG.degC.dimensionality -# XXX: Really? because this works too, seems way better! 
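# (a hedged, standalone check of the preferred comparison; Q_ is used since
# degC is an offset unit)
#     t = Q_(25, 'degC')
#     assert t.dimensionality == UREG.degC.dimensionality
# the XXX alternative below compares ``.units`` directly: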
-# EG: (25 * UREG.degC).units = UREG.degC - - -def dimensionless_to_index(index): - # convert dimensionless to index - if not index.dimensionality: - index = index.magnitude - else: - raise TypeError('Indices must be dimensionless.') - # TODO: make an exception called IndexUnitsError - return index - - -# custom JSON encoder to serialize Quantities and NumPy arrays -class CarouselJSONEncoder(json.JSONEncoder): - def default(self, o): - """ - JSONEncoder default method that converts NumPy arrays and quantities - objects to lists. - """ - if isinstance(o, Q_): - return o.magnitude - elif isinstance(o, np.ndarray): - return o.tolist() - else: - # raise TypeError if not serializable - return super(CarouselJSONEncoder, self).default(o) - - -def get_public_attributes(cls, as_list=True): - """ - Return class attributes that are neither private nor magic. - - :param cls: class - :param as_list: [True] set to False to return generator - :return: only public attributes of class - """ - attrs = (a for a in dir(cls) if not a.startswith('_')) - if as_list: - return list(attrs) - return attrs - - -class CommonBase(type): - """ - Provides common metaclass methods. - - * :meth:`get_parents` ensures initialization only from subclasses of the - main class and not the main class itself - * :meth:`set_param_file_or_parameters` adds class attributes ``param_file`` - or ``parameters`` depending on whether the path and file of the parameters - are given or if the parameters are listed as class attributes. - - Base classes must implement the ``_path_attr`` and ``_file_attr`` as class - attributes:: - - class ExampleBase(CommonBase): - _path_attr = 'outputs_path' # class attribute with parameter path - _file_attr = 'outputs_file' # class attribute with parameter file - """ - _path_attr = NotImplemented - _file_attr = NotImplemented - _param_cls = NotImplemented - # names of inferred objects - _meta_cls = 'Meta' # nested class or dictionary containing class options - _meta_attr = '_meta' # collected meta from classes, bases and files - _param_attr = 'parameters' # parameters collected from classes and files - _param_file = 'param_file' # optional file containing parameters - - @classmethod - def set_meta(mcs, bases, attr): - """ - Get all of the ``Meta`` classes from bases and combine them with this - class. - - Pops or creates ``Meta`` from attributes, combines all bases, adds - ``_meta`` to attributes with all meta - - :param bases: bases of this class - :param attr: class attributes - :return: attributes with ``Meta`` class from combined parents - """ - # pop the meta class from the attributes - meta = attr.pop(mcs._meta_cls, types.ClassType(mcs._meta_cls, (), {})) - # get a list of the meta public class attributes - meta_attrs = get_public_attributes(meta) - # check all bases for meta - for base in bases: - base_meta = getattr(base, mcs._meta_cls, None) - # skip if base has no meta - if base_meta is None: - continue - # loop over base meta - for a in get_public_attributes(base_meta, as_list=False): - # skip if already in meta - if a in meta_attrs: - continue - # copy meta-option attribute from base - setattr(meta, a, getattr(base_meta, a)) - attr[mcs._meta_attr] = meta # set _meta combined from bases - return attr - - @classmethod - def set_param_file_or_parameters(mcs, attr): - """ - Set parameters from class attributes that are instances of - :class:`~carousel.core.Parameter` or from a parameter file. 
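        For example, a hedged sketch of the two styles; the class names and
        paths are invented, and ``Calc``/``CalcParameter`` appear later in
        this patch::

            class PerfCalcs(Calc):
                class Meta:
                    calcs_path = 'path/to/calculations'  # _path_attr
                    calcs_file = 'performance.json'      # _file_attr

            class InlineCalcs(Calc):
                energy = CalcParameter(formula='f_energy',
                                       args={'outputs': {'ac_power': 'Pac'}},
                                       returns=['energy'])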
- - Any class attributes that are instances of - :class:`~carousel.core.Parameter` are popped from the class and added to - a the ``parameters`` attribute, which is a dictionary of the parameters. - - :param attr: class attributes - :return: new list of class attributes with parameters - """ - meta = attr[mcs._meta_attr] # look for parameter file path in meta - cls_path = getattr(meta, mcs._path_attr, None) - cls_file = getattr(meta, mcs._file_attr, None) - # read parameters - attr[mcs._param_attr] = {} - attr[mcs._param_file] = None - # read parameters from file - if None not in [cls_path, cls_file]: - param_file = os.path.join(cls_path, cls_file) - attr[mcs._param_file] = param_file - # read and load JSON parameter map file as "parameters" - with open(param_file, 'r') as param_file: - file_params = json.load(param_file) - # update meta from file - for k, v in file_params.pop(mcs._meta_cls, {}).iteritems(): - setattr(meta, k, v) - # dictionary of parameters for reading source file - attr[mcs._param_attr] = { - k: mcs._param_cls(**v) for k, v in file_params.iteritems() - } - # get parameters from class - parameters = dict.fromkeys( - k for k, v in attr.iteritems() if isinstance(v, Parameter) - ) - # update parameters - for k in parameters: - attr[mcs._param_attr][k] = attr.pop(k) - return attr - - @staticmethod - def get_parents(bases, parent): - """ - Ensures that initialization only performed on subclasses of parent - https://github.com/django/django/blob/master/django/db/models/base.py - - :param bases: Bases to compare against parent. - :type bases: list - :param parent: Superclass that bases should be subclassed from. - :return: Bases subclassed from parent. - :rtype: list - """ - return [b for b in bases if isinstance(b, parent)] - - -class Parameter(dict): - _attrs = [] - - def __init__(self, *args, **kwargs): - items = dict(zip(self._attrs, args)) - extras = {} - for key, val in kwargs.iteritems(): - if key in self._attrs: - items[key] = val - else: - extras[key] = val - LOGGER.warning('This key: "%s" is not an attribute.', key) - super(Parameter, self).__init__(items, extras=extras) - - def __repr__(self): - fmt = ('<%s(' % self.__class__.__name__) - fmt += ', '.join('%s=%r' % (k, v) for k, v in self.iteritems()) - fmt += ')>' - return fmt diff --git a/carousel/core/calculations.py.bak b/carousel/core/calculations.py.bak deleted file mode 100644 index acda162..0000000 --- a/carousel/core/calculations.py.bak +++ /dev/null @@ -1,120 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -This module provides base classes for calculations. All calculations should -inherit from one of the calcs in this module. -""" - -from carousel.core import logging, CommonBase, Registry, UREG, Parameter -from carousel.core.calculators import Calculator - -LOGGER = logging.getLogger(__name__) - - -class CalcParameter(Parameter): - """ - Fields for calculations. - """ - _attrs = ['dependencies', 'always_calc', 'frequency', 'formula', 'args', - 'returns', 'calculator', 'is_dynamic'] - - -class CalcRegistry(Registry): - """ - A registry for calculations. Each key is a calculation. The value - of each calculation is split into 2 dictionaries: "static" and - "dynamic". Static calculations occur once at the beginning of a simulation - and dynamic calculations occur at every interval. The contents of either - the "static" or "dynamic" key is an ordered list of formulas, their - arguments and return values. - - Calculations can list `dependencies `_ that must be - calculated first. 
Calculations marked as `always_calc` will not be limited - by thresholds set in simulations. The frequency determines how often to - dynamic calculations occur. Frequency can be given in intervals or can list - a quantity of time, _EG:_ ``2 * UREG.hours``. - """ - #: meta names - meta_names = ['dependencies', 'always_calc', 'frequency', 'calculator', - 'is_dynamic', 'calc_source'] - - def register(self, new_calc, *args, **kwargs): - """ - Register calculations and meta data. - - * ``dependencies`` - list of prerequisite calculations - * ``always_calc`` - ``True`` if calculation ignores thresholds - * ``frequency`` - frequency of calculation in intervals or units of time - - :param new_calc: register new calculation - """ - kwargs.update(zip(self.meta_names, args)) - # dependencies should be a list of other calculations - if isinstance(kwargs['dependencies'], basestring): - kwargs['dependencies'] = [kwargs['dependencies']] - # call super method, now meta can be passed as args or kwargs. - super(CalcRegistry, self).register(new_calc, **kwargs) - - -class CalcBase(CommonBase): - """ - Base calculation meta class. - """ - _path_attr = 'calcs_path' - _file_attr = 'calcs_file' - _param_cls = CalcParameter - - def __new__(mcs, name, bases, attr): - # use only with Calc subclasses - if not CommonBase.get_parents(bases, CalcBase): - return super(CalcBase, mcs).__new__(mcs, name, bases, attr) - # set _meta combined from bases - attr = mcs.set_meta(bases, attr) - # set param file full path if calculations path and file specified or - # try to set parameters from class attributes except private/magic - attr = mcs.set_param_file_or_parameters(attr) - return super(CalcBase, mcs).__new__(mcs, name, bases, attr) - - -class Calc(object): - """ - A class for all calculations. - """ - __metaclass__ = CalcBase - - def __init__(self): - meta = getattr(self, CalcBase._meta_attr) - parameters = getattr(self, CalcBase._param_attr) - #: ``True`` if always calculated (day and night) - self.always_calc = dict.fromkeys( - parameters, getattr(meta, 'always_calc', False) - ) - freq = getattr(meta, 'frequency', [1, '']) - #: frequency calculation is calculated in intervals or units of time - self.frequency = dict.fromkeys(parameters, freq[0] * UREG(str(freq[1]))) - #: dependencies - self.dependencies = dict.fromkeys( - parameters, getattr(meta, 'dependencies', []) - ) - #: name of :class:`Calc` superclass - self.calc_source = dict.fromkeys(parameters, self.__class__.__name__) - #: calculator - self.calculator = dict.fromkeys( - parameters, getattr(meta, 'calculator', Calculator) - ) - #: ``True`` if calculations are dynamic, ``False`` if static - self.is_dynamic = dict.fromkeys( - parameters, getattr(meta, 'is_dynamic', False) - ) - #: calculations - self.calcs = {} - for k, v in parameters.iteritems(): - self.calcs[k] = { - key: v[key] for key in ('formula', 'args', 'returns') - } - keys = ('dependencies', 'always_calc', 'frequency', 'calculator', - 'is_dynamic') - for key in keys: - value = v.get(key) - if value is not None: - getattr(self, key)[k] = value diff --git a/carousel/core/calculators.py.bak b/carousel/core/calculators.py.bak deleted file mode 100644 index d83d6d5..0000000 --- a/carousel/core/calculators.py.bak +++ /dev/null @@ -1,258 +0,0 @@ -""" -Calculators are used to execute calculations. 
-""" - -from carousel.core import logging, UREG -import numpy as np - -LOGGER = logging.getLogger(__name__) - - -def index_registry(args, reg, ts=None, idx=None): - """ - Index into a :class:`~carousel.core.Registry` to return arguments - from :class:`~carousel.core.data_sources.DataRegistry` and - :class:`~carousel.core.outputs.OutputRegistry` based on the - calculation parameter file. - - :param args: Arguments field from the calculation parameter file. - :param reg: Registry in which to index to get the arguments. - :type reg: :class:`~carousel.core.data_sources.DataRegistry`, - :class:`~carousel.core.outputs.OutputRegistry` - :param ts: Time step [units of time]. - :param idx: [None] Index of current time step for dynamic calculations. - - Required arguments for static and dynamic calculations are specified in the - calculation parameter file by the "args" key. Arguments can be from - either the data registry or the outputs registry, which is denoted by the - "data" and "outputs" keys. Each argument is a dictionary whose key is the - name of the argument in the formula specified and whose value can be one of - the following: - - * The name of the argument in the registry :: - - {"args": {"outputs": {"T_bypass": "T_bypass_diode"}}} - - maps the formula argument "T_bypass" to the outputs registry item - "T_bypass_diode". - - * A list with the name of the argument in the registry as the first element - and a negative integer denoting the index relative to the current - timestep as the second element :: - - {"args": {"data": {"T_cell": ["Tcell", -1]}}} - - indexes the previous timestep of "Tcell" from the data registry. - - * A list with the name of the argument in the registry as the first element - and a list of positive integers denoting the index into the item from the - registry as the second element :: - - {"args": {"data": {"cov": ["bypass_diode_covariance", [2]]}}} - - indexes the third element of "bypass_diode_covariance". - - * A list with the name of the argument in the registry as the first - element, a negative real number denoting the time relative to the current - timestep as the second element, and the units of the time as the third :: - - {"args": {"data": {"T_cell": ["Tcell", -1, 'day']}}} - - indexes the entire previous day of "Tcell". 
- """ - # TODO: move this to new Registry method or __getitem__ - # TODO: replace idx with datetime object and use timeseries to interpolate - # into data, not necessary for outputs since that will conform to idx - rargs = dict.fromkeys(args) # make dictionary from arguments - # iterate over arguments - for k, v in args.iteritems(): - # var ------------------ states ------------------ - # idx ===== not None ===== ======= None ======= - # isconstant True False None True False None - # is_dynamic no yes yes no no no - is_dynamic = idx and not reg.isconstant.get(v) - # switch based on string type instead of sequence - if isinstance(v, basestring): - # the default assumes the current index - rargs[k] = reg[v][idx] if is_dynamic else reg[v] - elif len(v) < 3: - if reg.isconstant[v[0]]: - # only get indices specified by v[1] - # tuples interpreted as a list of indices, see - # NumPy basic indexing: Dealing with variable - # numbers of indices within programs - rargs[k] = reg[v[0]][tuple(v[1])] - elif v[1] < 0: - # specified offset from current index - rargs[k] = reg[v[0]][idx + v[1]] - else: - # get indices specified by v[1] at current index - rargs[k] = reg[v[0]][idx][tuple(v[1])] - else: - # specified timedelta from current index - dt = 1 + (v[1] * UREG(str(v[2])) / ts).item() - # TODO: deal with fractions of timestep - rargs[k] = reg[v[0]][(idx + dt):(idx + 1)] - return rargs - - -class Calculator(object): - """ - Base class for calculators. Must implement ``calculate`` method. - """ - shortname = '' - - @staticmethod - def get_covariance(datargs, outargs, vargs, datvar, outvar): - """ - Get covariance matrix. - - :param datargs: data arguments - :param outargs: output arguments - :param vargs: variable arguments - :param datvar: variance of data arguments - :param outvar: variance of output arguments - :return: covariance - """ - # number of formula arguments that are not constant - argn = len(vargs) - # number of observations must be the same for all vargs - nobs = 1 - for m in xrange(argn): - a = vargs[m] - try: - a = datargs[a] - except (KeyError, TypeError): - a = outargs[a] - avar = outvar[a] - else: - avar = datvar[a] - for n in xrange(argn): - b = vargs[n] - try: - b = datargs[b] - except (KeyError, TypeError): - b = outargs[b] - c = avar.get(b, 0.0) - try: - nobs = max(nobs, len(c)) - except (TypeError, ValueError): - LOGGER.debug('c of %s vs %s = %g', a, b, c) - # covariance matrix is initially zeros - cov = np.zeros((nobs, argn, argn)) - # loop over arguments in both directions, fill in covariance - for m in xrange(argn): - a = vargs[m] - try: - a = datargs[a] - except (KeyError, TypeError): - a = outargs[a] - avar = outvar[a] - else: - avar = datvar[a] - for n in xrange(argn): - b = vargs[n] - try: - b = datargs[b] - except (KeyError, TypeError): - b = outargs[b] - cov[:, m, n] = avar.get(b, 0.0) - if nobs == 1: - cov = cov.squeeze() # squeeze out any extra dimensions - LOGGER.debug('covariance:\n%r', cov) - return cov - - @classmethod - def calculate(cls, calc, formula_reg, data_reg, out_reg, - timestep=None, idx=None): - """ - Execute calculation - - :param calc: calculation, with formula, args and return keys - :type calc: dict - :param formula_reg: Registry of formulas. - :type formula_reg: :class:`~carousel.core.FormulaRegistry` - :param data_reg: Data registry. - :type data_reg: :class:`~carousel.core.data_sources.DataRegistry` - :param out_reg: Outputs registry. 
- :type out_reg: :class:`~carousel.core.outputs.OutputRegistry` - :param timestep: simulation interval length [time], default is ``None`` - :param idx: interval index, default is ``None`` - :type idx: int - """ - # get the formula-key from each static calc - formula = calc['formula'] # name of formula in calculation - func = formula_reg[formula] # formula function object - fargs = formula_reg.args.get(formula, []) # formula arguments - constants = formula_reg.isconstant.get(formula) # constant args - # formula arguments that are not constant - vargs = [] if constants is None else [a for a in fargs if a not in constants] - args = calc['args'] # calculation arguments - # separate data and output arguments - datargs, outargs = args.get('data', {}), args.get('outputs', {}) - data = index_registry(datargs, data_reg, timestep, idx) - outputs = index_registry(outargs, out_reg, timestep, idx) - kwargs = dict(data, **outputs) # combined data and output args - args = [kwargs.pop(a) for a in fargs if a in kwargs] - returns = calc['returns'] # return arguments - # if constants is None then the covariance should also be None - # TODO: except other values, eg: "all" to indicate no covariance - if constants is None: - cov = None # do not propagate uncertainty - else: - # get covariance matrix - cov = cls.get_covariance(datargs, outargs, vargs, - data_reg.variance, out_reg.variance) - # update kwargs with covariance if it exists - kwargs['__covariance__'] = cov - retval = func(*args, **kwargs) # calculate function - # update output registry with covariance and jacobian - if cov is not None: - # split uncertainty and jacobian from return values - cov, jac = retval[-2:] - retval = retval[:-2] - # scale covariance - scale = np.asarray( - [1 / r.m if isinstance(r, UREG.Quantity) else 1 / r - for r in retval] - ) # use magnitudes if quantities - cov = (np.swapaxes((cov.T * scale), 0, 1) * scale).T - nret = len(retval) # number of return output - for m in xrange(nret): - a = returns[m] # name in output registry - out_reg.variance[a] = {} - out_reg.uncertainty[a] = {} - out_reg.jacobian[a] = {} - for n in xrange(nret): - b = returns[n] - out_reg.variance[a][b] = cov[:, m, n] - if a == b: - unc = np.sqrt(cov[:, m, n]) * 100 * UREG.percent - out_reg.uncertainty[a][b] = unc - for n in xrange(len(vargs)): - b = vargs[n] - try: - b = datargs[b] - except (KeyError, TypeError): - b = outargs[b] - out_reg.jacobian[a][b] = jac[:, m, n] - LOGGER.debug('%s cov:\n%r', a, out_reg.variance[a]) - LOGGER.debug('%s jac:\n%r', a, out_reg.jacobian[a]) - LOGGER.debug('%s unc:\n%r', a, out_reg.uncertainty[a]) - # if there's only one return value, squeeze out extra dimensions - if len(retval) == 1: - retval = retval[0] - # put return values into output registry - if len(returns) > 1: - # more than one return, zip them up - if idx is None: - out_reg.update(zip(returns, retval)) - else: - for k, v in zip(returns, retval): - out_reg[k][idx] = v - else: - # only one return, get it by index at 0 - if idx is None: - out_reg[returns[0]] = retval - else: - out_reg[returns[0]][idx] = retval diff --git a/carousel/core/data_readers.py.bak b/carousel/core/data_readers.py.bak deleted file mode 100644 index 378fffb..0000000 --- a/carousel/core/data_readers.py.bak +++ /dev/null @@ -1,805 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module provides the base classes for data readers, such as -`XLRD `_ and :func:`numpy.loadtxt`, -which are used to read in data sources. 
-""" - -from StringIO import StringIO -from carousel.core import UREG, Q_ -from carousel.core.exceptions import ( - UnnamedDataError, MixedTextNoMatchError -) -from xlrd import open_workbook -import csv -import numpy as np -import json -import os -import time -import re - -# regex pattern for %e, %E, %f and %g -# http://docs.python.org/2/library/re.html#simulating-scanf -# use (?...) for non capturing groups -EFG_PATTERN = '([-+]?(?:\\d+(?:\\.\\d*)?|\\.\\d+)(?:[eE][-+]?\\d+)?)' -# whitelist regex methods -RE_METH = ['search', 'match', 'findall', 'split'] - - -class DataReader(object): - """ - Required interface for all Carousel data readers. - - :param parameters: parameters to be read - :type parameters: dict - """ - #: True if reader accepts ``filename`` argument - is_file_reader = True # overload in subclasses - - def __init__(self, parameters, meta=None): - #: parameters to be read by reader - self.parameters = parameters - #: meta if any - self.meta = meta - - def load_data(self, *args, **kwargs): - """ - Load data from source using reader. This method must be implemented by - each data reader. - - :param args: positional arguments - :param kwargs: keyword arguments - :returns: data read by :class:`DataReader` - :rtype: dict - :raises: :exc:`~exceptions.NotImplementedError` - """ - raise NotImplementedError('load_data') - - def apply_units_to_cache(self, data): - """ - Apply units to cached data. This method must be implemented by each data - reader. - - :param data: cached data - :return: data with units applied - :rtype: :class:`~pint.unit.Quantity` - :raises: :exc:`~exceptions.NotImplementedError` - """ - raise NotImplementedError('apply_units_to_cache') - - -class JSONReader(DataReader): - """ - Read data from a JSON file. - - :param parameters: parameters to read - :type parameters: dict - :param data_reader: original :class:`DataReader` if data cached as JSON - - This the default data reader if not specified in the data source. The format - of the data is similar to the dictionary used to create the data registry, - except without units. - - For example:: - - { - "data": { - "DNI": [834, 523, 334, 34, 0, 0], - "zenith": [21, 28, 45, 79, 90, 90] - }, - "param_file": "path/to/corresponding/param_file.json", - "data_source": "MyDataSource" - } - - Parameters can be specified in a JSON file. :: - - { - "DNI": { - "description": "direct normal insolation", - "units": "W/m*^2", - "isconstant": false - }, - "zenith": { - "description": "solar zenith", - "units": "degrees", - "isconstant": false - } - } - - Parameters can also be specified in the data source as class attributes. :: - - class MyDataSrc(DataSource): - data_reader = JSONReader - DNI = { - "description": "direct normal insolation", - "units": "W/m*^2", - "isconstant": false - } - zenith = { - "description": "solar zenith", - "units": "degrees", - "isconstant": false - } - - """ - def __init__(self, parameters, meta=None): - super(JSONReader, self).__init__(parameters, meta) - #: original data reader [None] - self.orig_data_reader = meta.data_reader - - def load_data(self, filename, *args, **kwargs): - """ - Load JSON data. 
- - :param filename: name of JSON file with data - :type filename: str - :return: data - :rtype: dict - """ - # append .json extension if needed - if not filename.endswith('.json'): - filename += '.json' # append "json" to filename - # open file and load JSON data - with open(filename, 'r') as fid: - json_data = json.load(fid) - # if JSONReader is the original reader then apply units and return - if (not self.orig_data_reader or - isinstance(self, self.orig_data_reader)): - return self.apply_units_to_cache(json_data['data']) - # last modification since JSON file was saved - utc_mod_time = json_data.get('utc_mod_time') - # instance of original data reader with original parameters - orig_data_reader_obj = self.orig_data_reader(self.parameters, self.meta) - # check if file has been modified since saved as JSON file - if utc_mod_time: - # convert to ordered tuple - utc_mod_time = time.struct_time(utc_mod_time) - orig_filename = filename[:-5] # original filename - # use original file if it's been modified since JSON file saved - if utc_mod_time < time.gmtime(os.path.getmtime(orig_filename)): - os.remove(filename) # delete JSON file - return orig_data_reader_obj.load_data(orig_filename) - # use JSON file if original file hasn't been modified - return orig_data_reader_obj.apply_units_to_cache(json_data['data']) - - def apply_units_to_cache(self, data): - """ - Apply units to data read using :class:`JSONReader`. - - :param data: cached data - :return: data with units applied - :rtype: :class:`~pint.unit.Quantity` - """ - for k, val in self.parameters.iteritems(): - if 'units' in val: - data[k] = Q_(data[k], val.get('units')) - return data - - -class XLRDReader(DataReader): - """ - Read data using XLRD. - - The :attr:`~DataReader.parameters` argument is a dictionary. Each item is a - dictionary that contains the following keys: "description", "units", "range" - and "sheet". The parameters argument should be the parameters argument - created by a :class:`~carousel.core.data_sources.DataSource` and the values - of each item should be a :class:`~carousel.core.data_sources.DataParameter`. - Therefore any non-parameter attributes like "range" and "sheet" would be in - "extras". - - If the range is a ... - - * single cell -- use [rowx, colx]. - * 2-D range -- use 2 arrays, [start, stop], each with [rowx, colx]. - * column slice -- use an array and an int, [slice, colx], in which slice is - [start-rowx, stop-rowx]. Set stop-rowx to ``None`` to read the rest of - the column after start-rowx. - * row slice -- use [rowx, slice] in which slice is [start-colx, stop-colx]. - Set stop-colx to ``None`` to read the rest of the row after start-colx. - * column -- use [None, colx] or [[], colx] - * row -- use [rowx, None] or [rowx, []] - - .. seealso:: - `The xlrd Module `_ - - Example of :attr:`~DataReader.parameters`:: - - parameters = { - "month": { - "description": "month of year", - "units": "month", - "range": [[2, 8762], 2], - "sheet": "Level 1 Outputs" - }, - "day": { - "description": "day of month", - "units': "day", - "range': [[2, 8762], 3]} - "sheet": "Level 1 Outputs" - }, - "PAC": { - "description": "AC power", - "units": "kW", - "range": [[2, 8762], 12], - "sheet": "Level 2 Outputs" - }, - "PDC": { - "description": "DC power", - "units": "kW", - "range": [[2, 8762], 13], - "sheet": "Level 2 Outputs" - } - } - - This loads "month" and "day" data from columns 2 and 3 in the "Level 1 - Outputs" sheet and "PAC" and "PDC" data from columns 12 and 13 in the - "Level 2 Outputs" sheets. 
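    In xlrd terms, those range forms correspond to the following reads (a
    sketch, not the reader itself; ``ws`` is an xlrd sheet object)::

        ws.cell_value(2, 3)        # single cell: [2, 3]
        ws.col_values(2, 2, 8762)  # column slice: [[2, 8762], 2]
        ws.row_values(2, 0, None)  # row slice: [2, [0, None]]
        ws.col_values(2)           # whole column: [None, 2]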
The units for each data set and a description is - also given. Each of the data columns is 8760 rows long, from row 2 to row - 8762. Don't forget that indexing starts at 0, so row 2 is the 3rd row. - """ - - def load_data(self, filename, *args, **kwargs): - """ - Load parameters from Excel spreadsheet. - - :param filename: Name of Excel workbook with data. - :type filename: str - :returns: Data read from Excel workbook. - :rtype: dict - """ - # workbook read from file - workbook = open_workbook(filename, verbosity=True) - data = {} # an empty dictionary to store data - # iterate through sheets in parameters - # iterate through the parameters on each sheet - for param, pval in self.parameters.iteritems(): - sheet = pval['extras']['sheet'] - # get each worksheet from the workbook - worksheet = workbook.sheet_by_name(sheet) - # split the parameter's range elements - prng0, prng1 = pval['extras']['range'] - # missing "units", json ``null`` and Python ``None`` all OK! - # convert to str from unicode, None to '' (dimensionless) - punits = str(pval.get('units') or '') - # replace None with empty list - if prng0 is None: - prng0 = [] - if prng1 is None: - prng1 = [] - # FIXME: Use duck-typing here instead of type-checking! - # if both elements in range are `int` then parameter is a cell - if isinstance(prng0, int) and isinstance(prng1, int): - datum = worksheet.cell_value(prng0, prng1) - # if the either element is a `list` then parameter is a slice - elif isinstance(prng0, list) and isinstance(prng1, int): - datum = worksheet.col_values(prng1, *prng0) - elif isinstance(prng0, int) and isinstance(prng1, list): - datum = worksheet.row_values(prng0, *prng1) - # if both elements are `list` then parameter is 2-D - else: - datum = [] - for col in xrange(prng0[1], prng1[1]): - datum.append(worksheet.col_values(col, prng0[0], - prng1[0])) - # duck typing that datum is real - try: - npdatum = np.array(datum, dtype=np.float) - except ValueError as err: - # check for iterable: - # if `datum` can't be coerced to float, then it must be - # *string* & strings *are* iterables, so don't check! - # check for strings: - # data must be real or *all* strings! - # empty string, None or JSON null also OK - # all([]) == True but any([]) == False - if not datum: - data[param] = None # convert empty to None - elif all(isinstance(_, basestring) for _ in datum): - data[param] = datum # all str is OK (EG all 'TMY') - elif all(not _ for _ in datum): - data[param] = None # convert list of empty to None - else: - raise err # raise ValueError if not all real or str - else: - data[param] = npdatum * UREG(punits) - # FYI: only put one statement into try-except test otherwise - # might catch different error than expected. use ``else`` as - # option to execute only if exception *not* raised. - return data - - def apply_units_to_cache(self, data): - """ - Apply units to cached data read using :class:`JSONReader`. - - :param data: Cached data. - :type data: dict - :return: data with units - """ - # iterate through sheets in parameters - # iterate through the parameters on each sheet - for param, pval in self.parameters.iteritems(): - # try to apply units - try: - data[param] *= UREG(str(pval.get('units') or '')) - except TypeError: - continue - return data - - -class NumPyLoadTxtReader(DataReader): - """ - Read data using :func:`numpy.loadtxt` function. - - The :attr:`~DataReader.parameters` argument is a dictionary that must have - a "data" key. An additional "header" is optional; see :func:`_read_header`. 
- - The "data" key provides arguments to :func:`numpy.loadtxt`. The "dtype" key - must be specified, as names are required for all data in Carousel. Some - of the other :func:`numpy.loadtxt` arguments: "delimiter" and "skiprows" can - also be specified as keys. In addition "units" can also be specified in a - dictionary in which the keys are the names of the data output by - :func:`numpy.loadtxt`. Converters are not permitted. The "usecols" - argument is also not used since :func:`numpy.loadtxt` states that "the - number of columns used must match the number of fields in the data-type" - and "dtype" is already specified. The other arguments, "fname", "comments", - "unpack" and "ndmin" are also not used. - - Example of :attr:`~DataReader.parameters`:: - - parameters = { - 'header': { - 'delimiter': ',', - 'fields': [ - ['Name', 'str'], - ['Latitude', 'float', 'arcdegree'], - ['Longitude', 'float', 'arcdegree']]}, - 'data': { - 'dtype': [ - ['Date', '(3,)int'], ['Time', '(2,)int'], - ['GHI', 'float'], ['DNI', 'float'], ['DHI', 'float']], - 'units': { - 'GHI': 'W/m**2', 'DNI': 'W/m**2', 'DHI': 'W/m**2'}, - 'usecols': [0, 1, 4, 7, 10]}} - - This loads a header with 3 fields followed by 5 columns of data, converting - the 1st column, "Date", to a 3-element tuple of ``int`` and the 2nd column, - "Time", to a 2-element tuple of ``int``. - """ - - def load_data(self, filename, *args, **kwargs): - """ - load data from text file. - - :param filename: name of text file to read - :type filename: str - :returns: data read from file using :func:`numpy.loadtxt` - :rtype: dict - """ - # header keys - header_param = self.parameters.get('header') # default is None - # data keys - data_param = self.parameters['data'] # raises KeyError if no 'data' - dtype = data_param['dtype'] # raises KeyError if no 'dtype' - # convert to tuple and normal ASCII - _utf8_list_to_ascii_tuple(dtype) if dtype else None # -> tuple of str - delimiter = data_param.get('delimiter') # default is None - skiprows = data_param.get('skiprows') # default is None - data_units = data_param.get('units', {}) # default is an empty dict - data = {} # a dictionary for data - # open file for reading - with open(filename, 'r') as fid: - # read header - if header_param: - data.update(_read_header(fid, header_param)) - fid.seek(0) # move cursor back to beginning - # read data - data_data = np.loadtxt(fid, dtype, delimiter=delimiter, - skiprows=skiprows) - # apply units - data.update(_apply_units(data_data, data_units, fid.name)) - return data - - def apply_units_to_cache(self, data): - """ - Apply units to data originally loaded by :class:`NumPyLoadTxtReader`. - """ - return _apply_units_to_numpy_data_readers(self.parameters, data) - - -class NumPyGenFromTxtReader(DataReader): - """ - Read data using :func:`numpy.genfromtxt` function. - - The :attr:`~DataReader.parameters` argument is a dictionary that must have - a "data" key. An additional "header" is optional; see :func:`_read_header`. - - The "data" key provides arguments to :func:`numpy.genfromtxt`. Either the - "dtype" or "names" key must be specified, as names are required for all - data in Carousel. Some of the other :func:`numpy.genfromtxt` arguments: - "delimiter", "skip_header", "usecols", "excludelist" and "deletechars" can - also be specified as keys. In addition "units" can also be specified in a - dictionary in which the keys are the names of the data output by - :func:`numpy.genfromtxt`. Converters are not permitted. 
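    A standalone check of a named read like the one the "data" key above
    describes; the buffer contents are invented::

        from io import BytesIO
        buf = BytesIO(b'834. 523. 334.\n830. 520. 330.\n')
        arr = np.genfromtxt(buf, names=['DNI', 'DHI', 'GHI'])
        # arr['GHI'] -> array([ 334.,  330.])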
The other - arguments, "fname", "comments", "skip_footer", "missing_values", - "filling_values", "defaultfmt", "autostrip", "replace_space", - "case_sensitive", "unpack", "usemask" and "invalid_raise" are also not - used. - - If the data names are not specified in the "dtypes" key or "names" key, - then :meth:`~NumPyGenFromTxtReader.load_data` will raise an exception, - :exc:`~carousel.core.exceptions.UnnamedDataError`. - - .. seealso:: - `Importing data with genfromtxt \ - `_ - - Example of :attr:`~DataReader.parameters`:: - - parameters = { - 'header': { - 'delimiter': ' ', - 'fields': [ - ['city', 'str'], ['state', 'str'], - ["timezone", 'int'], ["elevation", 'int', 'meters']]}, - 'data': { - 'delimiter': 4, - 'names': ['DNI', 'DHI', 'GHI'], - 'units': {'DNI': 'W/m**2', 'DHI': 'W/m**2', 'GHI': 'W/m**2'}}} - - This loads a header that is delimited by whitespace, followed by data in - three fixed-width columns all 4-digit floats. - """ - - def load_data(self, filename, *args, **kwargs): - """ - load data from text file. - - :param filename: name of file to read - :type filename: str - :returns: data read from file using :func:`numpy.genfromtxt` - :rtype: dict - :raises: :exc:`~carousel.core.exceptions.UnnamedDataError` - """ - # header keys - header_param = self.parameters.get('header') # default is None - # data keys - data_param = self.parameters['data'] # raises KeyError if no 'data' - dtype = data_param.get('dtype') # default is None - # if not None convert to tuple and normal ASCII - _utf8_list_to_ascii_tuple(dtype) if dtype else None # -> tuple of str - delimiter = data_param.get('delimiter') # default is None - skip_header = data_param.get('skip_header') # default is None - usecols = data_param.get('usecols') # default is None - names = data_param.get('names') # default is None - names = [str(_) for _ in names] if names else None # -> str - excludelist = data_param.get('excludelist') # default is None - deletechars = data_param.get('deletechars') # default is None - data_units = data_param.get('units', {}) # default is an empty dict - # either dtype or names must be specified - if not (dtype or names): - raise UnnamedDataError(filename) - data = {} # a dictionary for data - # open file for reading - with open(filename, 'r') as fid: - # read header - if header_param: - data.update(_read_header(fid, header_param)) - fid.seek(0) # move cursor back to beginning - # data - data_data = np.genfromtxt(fid, dtype, delimiter=delimiter, - skip_header=skip_header, usecols=usecols, - names=names, excludelist=excludelist, - deletechars=deletechars) - # apply units - data.update(_apply_units(data_data, data_units, fid.name)) - return data - - def apply_units_to_cache(self, data): - """ - Apply units to data originally loaded by :class:`NumPyLoadTxtReader`. - """ - return _apply_units_to_numpy_data_readers(self.parameters, data) - - -def _apply_units_to_numpy_data_readers(parameters, data): - """ - Apply units to data originally loaded by :class:`NumPyLoadTxtReader` or - :class:`NumPyGenFromTxtReader`. - - :param parameters: Dictionary of data source parameters read from JSON - file. 
- :type parameters: dict - :param data: Dictionary of data read - """ - # apply header units - header_param = parameters.get('header') # default is None - # check for headers - if header_param: - fields = header_param['fields'] # header fields - # dictionary of header field parameters - header_fields = {field[0]: field[1:] for field in fields} - # loop over fieldnames - for k, val in header_fields.iteritems(): - # check for units in header field parameters - if len(val) > 1: - data[k] *= UREG(str(val[1])) # apply units - # apply other data units - data_units = parameters['data'].get('units') # default is None - if data_units: - for k, val in data_units.iteritems(): - data[k] *= UREG(str(val)) # apply units - return data - - -def _read_header(f, header_param): - """ - Read and parse data from 1st line of a file. - - :param f: :func:`file` or :class:`~StringIO.StringIO` object from which to - read 1st line. - :type f: file - :param header_param: Parameters used to parse the data from the header. - Contains "delimiter" and "fields". - :type header_param: dict - :returns: Dictionary of data read from header. - :rtype: dict - :raises: :exc:`~carousel.core.exceptions.UnnamedDataError` - - The **header_param** argument contains keys to read the 1st line of **f**. - If "delimiter" is ``None`` or missing, the default delimiter is a comma, - otherwise "delimiter" can be any single character, integer or sequence of - ``int``. - - * single character -- a delimiter - * single integer -- uniform fixed width - * sequence of ``int`` -- fixed widths, the number of fields should \ - correspond to the length of the sequence. - - The "fields" key is a list of (parameter-name, parameter-type[, parameter- - units]) lists. - """ - # default delimiter is a comma, can't be None - header_delim = str(header_param.get('delimiter', ',')) - # don't allow unnamed fields - if 'fields' not in header_param: - raise UnnamedDataError(f.name) - header_fields = {field[0]: field[1:] for field in header_param['fields']} - # header_names can't be generator b/c DictReader needs list, and can't be - # dictionary b/c must be same order as 'fields' to match data readby csv - header_names = [field[0] for field in header_param['fields']] - # read header - header_str = StringIO(f.readline()) # read the 1st line - # use csv because it will preserve quoted fields with commas - # make a csv.DictReader from header string, use header names for - # fieldnames and set delimiter to header delimiter - header_reader = csv.DictReader(header_str, header_names, - delimiter=header_delim, - skipinitialspace=True) - data = header_reader.next() # parse the header dictionary - # iterate over items in data - for k, v in data.iteritems(): - header_type = header_fields[k][0] # spec'd type - # whitelist header types - if isinstance(header_type, basestring): - if header_type.lower().startswith('int'): - header_type = int # coerce to integer - elif header_type.lower().startswith('long'): - header_type = long # coerce to long integer - elif header_type.lower().startswith('float'): - header_type = float # to floating decimal point - elif header_type.lower().startswith('str'): - header_type = str # coerce to string - elif header_type.lower().startswith('bool'): - header_type = bool # coerce to boolean - else: - raise TypeError('"%s" is not a supported type.' % header_type) - # WARNING! Use of `eval` considered harmful. 
`header_type` is read - # from JSON file, not secure input, could be used to exploit system - data[k] = header_type(v) # cast v to type - # check for units in 3rd element - if len(header_fields[k]) > 1: - units = UREG(str(header_fields[k][1])) # spec'd units - data[k] = data[k] * units # apply units - return data - - -def _apply_units(data_data, data_units, fname): - """ - Apply units to data. - - :param data_data: NumPy structured array with data from fname. - :type data_data: :class:`numpy.ndarray` - :param data_units: Units of fields in data_data. - :type data_units: dict - :param fname: Name of file from which data_data was read. - :type fname: str - :returns: Dictionary of data with units applied. - :rtype: dict - :raises: :exc:`~carousel.core.exceptions.UnnamedDataError` - """ - data_names = data_data.dtype.names - # raise error if NumPy data doesn't have names - if not data_names: - raise UnnamedDataError(fname) - data = dict.fromkeys(data_names) # dictionary of data read by NumPy - # iterate over data read by NumPy - for data_name in data_names: - if data_name in data_units: - # if units specified in parameters, then convert to string - units = str(data_units[data_name]) - data[data_name] = data_data[data_name] * UREG(units) - elif np.issubdtype(data_data[data_name].dtype, str): - # if no units specified and is string - data[data_name] = data_data[data_name].tolist() - else: - data[data_name] = data_data[data_name] - return data - - -def _utf8_list_to_ascii_tuple(utf8_list): - """ - Convert unicode strings in a list of lists to ascii in a list of tuples. - - :param utf8_list: A nested list of unicode strings. - :type utf8_list: list - """ - for n, utf8 in enumerate(utf8_list): - utf8_list[n][0] = str(utf8[0]) - utf8_list[n][1] = str(utf8[1]) - utf8_list[n] = tuple(utf8) - - -class ParameterizedXLS(XLRDReader): - """ - Concatenate data from parameterized sheets. - - :param parameters: Parameterization information. - - All data in parameterized sheets must be vectors of only numbers. - """ - def __init__(self, parameters): - #: parameterizaton information - self.parameterization = parameters - new_parameters = {} # empty dict for sheet parameters - parameter_sheets = self.parameterization['parameter']['sheets'] - for n, sheet in enumerate(parameter_sheets): - new_parameters[sheet] = {} # empty dictionary for sheet data - for k, v in self.parameterization['data'].iteritems(): - new_parameters[sheet][k + '_' + str(n)] = v - super(ParameterizedXLS, self).__init__(new_parameters) - # filename is instance attribute of XLRDReader - - def load_data(self, filename, *args, **kwargs): - """ - Load parameterized data from different sheets. 
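        A hedged sketch of the parameterization mapping this reader expects;
        the sheet and key names are invented::

            parameters = {
                'parameter': {'name': 'Tcell', 'values': [25.0, 50.0, 75.0],
                              'units': 'degC', 'sheets': ['T25', 'T50', 'T75']},
                'data': {'IV': {'range': [[2, 8762], 2], 'units': 'A'}}}
            reader = ParameterizedXLS(parameters)
            data = reader.load_data('curves.xlsx')  # concatenates IV_0..IV_2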
- """ - # load parameterized data - data = super(ParameterizedXLS, self).load_data(filename) - # add parameter to data - parameter_name = self.parameterization['parameter']['name'] - parameter_values = self.parameterization['parameter']['values'] - parameter_units = str(self.parameterization['parameter']['units']) - data[parameter_name] = parameter_values * UREG(parameter_units) - # number of sheets - num_sheets = len(self.parameterization['parameter']['sheets']) - # parse and concatenate parameterized data - for key in self.parameterization['data']: - units = str(self.parameterization['data'][key].get('units')) or '' - datalist = [] - for n in xrange(num_sheets): - k = key + '_' + str(n) - datalist.append(data[k].reshape((1, -1))) - data.pop(k) # remove unused data keys - data[key] = np.concatenate(datalist, axis=0) * UREG(units) - return data - - def apply_units_to_cache(self, data): - """ - Apply units to :class:`ParameterizedXLS` data reader. - """ - # parameter - parameter_name = self.parameters['parameter']['name'] - parameter_units = str(self.parameters['parameter']['units']) - data[parameter_name] *= UREG(parameter_units) - # data - self.parameters.pop('parameter') - return super(ParameterizedXLS, self).apply_units_to_cache(data) - - -class MixedTextXLS(XLRDReader): - """ - Get parameters from cells mixed with text by matching regex pattern. - - :raises: :exc:`~carousel.core.exceptions.MixedTextNoMatchError` - - Use this reader for spreadsheets that have numerical data mixed with text. - It uses the same parameter file as :class:`XLRDReader` with two additional - keys: "pattern" and "method". The "pattern" must be a valid regex pattern. - Remember to escape backslashes. The "method" must be one of the following - regex methods from :mod:`re`: - - * :func:`~re.match` - * :func:`~re.search` - * :func:`~re.split` - * :func:`~re.findall` - - The default method is :func:`re.search` and the default pattern searches - for any number represented by the FORTRAN formatters "%e", "%E", "%f" or - "%g". This will find one number in any of the formats anywhere in the text - of the cell(s) read. - - Example:: - - { - "Sheet1": { - "sigma_bypass_diode": { - "range": [15, 1], - "pattern": - "\\w+ = ([-+]?(?:\\d+(?:\\.\\d*)?|\\.\\d+)(?:[eE][-+]?\\d+)?)", - "method": "match" - }, - "B_bypass_diode": { - "range": [16, 1], - "method": "findall" - }, - "C_bypass_diode": { - "range": [17, 1], - "pattern": "\((\\d+), (\\d+), (\\d+)\)", - "method": "search" - }, - "cov_bypass_diode": { - "range": [18, 1], - "pattern": "[,;]", - "method": "split" - } - } - } - - These examples all read from "Sheet1". The first example matches one or - more alphanumeric characters at the beginning of the string set equal to an - integer, decimal or number in scientific notation, such as "Std = 0.4985" - from cell B16. The second example finds all numbers matching the default - pattern in cell B17. The third example searches for 3 integers in - parenthesis separated by commas anywhere in cell B18. The last example - splits a string delimited by commas and semicolons in cell B19. - - If no match is found then - :exc:`~carousel.core.exceptions.MixedTextNoMatchError` - is raised. Only numbers can be read, and any single-dimensions will be - squeezed out. For example scalars will become 0-d arrays. - """ - - def load_data(self, filename, *args, **kwargs): - """ - Load text data from different sheets. 
- """ - # load text data - data = super(MixedTextXLS, self).load_data(filename) - # iterate through sheets in parameters - for sheet_params in self.parameters.itervalues(): - # iterate through the parameters on each sheet - for param, pval in sheet_params.iteritems(): - pattern = pval.get('pattern', EFG_PATTERN) # get pattern - re_meth = pval.get('method', 'search') # get re method - # whitelist re methods, getattr could be considered harmful - if re_meth in RE_METH: - re_meth = getattr(re, pval.get('method', 'search')) - else: - msg = 'Only', '"%s", ' * len(RE_METH) % tuple(RE_METH) - msg += 'regex methods are allowed.' - raise AttributeError(msg) - # if not isinstance(data[param], basestring): - # re_meth = lambda p, dp: [re_meth(p, d) for d in dp] - match = re_meth(pattern, data[param]) # get matches - if match: - try: - match = match.groups() - except AttributeError: - match = [m.groups() for m in match] - npdata = np.array(match, dtype=float).squeeze() - data[param] = npdata * UREG(str(pval.get('units') or '')) - else: - raise MixedTextNoMatchError(re_meth, pattern, data[param]) - return data diff --git a/carousel/core/data_sources.py.bak b/carousel/core/data_sources.py.bak deleted file mode 100644 index bdec134..0000000 --- a/carousel/core/data_sources.py.bak +++ /dev/null @@ -1,303 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module provides base classes for data sources. Data sources provide -data to calculations. All data used comes from a data source. The requirements -for data sources are as follows: - -1. Data sources must be sub-classed to :class:`DataSource`. -2. They must know where to get their data, either from a file or from other - data sources. -3. They need a data reader that knows how to extract the data from the file, - or combine data in calculations to produce new data. -4. They require a parameter map that states exactly where the data is and what - its units are, what the data will be called in calculations and any other - meta-data the registry requires. -""" - -from carousel.core import ( - UREG, Registry, CarouselJSONEncoder, CommonBase, Parameter -) -from carousel.core.data_readers import JSONReader -from carousel.core.exceptions import ( - UncertaintyPercentUnitsError, UncertaintyVarianceError -) -import json -import os -import time -from copy import copy -import numpy as np - -DFLT_UNC = 1.0 * UREG('percent') # default uncertainty - - -class DataParameter(Parameter): - """ - Field for data parameters. - """ - _attrs = ['units', 'uncertainty', 'isconstant', 'timeseries'] - - -class DataRegistry(Registry): - """ - A registry for data sources. The meta names are: ``uncertainty``, - ``variance``, ``isconstant``, ``timeseries`` and ``data_source`` - """ - #: meta names - meta_names = ['uncertainty', 'variance', 'isconstant', 'timeseries', - 'data_source'] - - def register(self, newdata, *args, **kwargs): - """ - Register data in registry. Meta for each data is specified by positional - or keyword arguments after the new data and consists of the following: - - * ``uncertainty`` - Map of uncertainties in percent corresponding to new - keys. The uncertainty keys must be a subset of the new data keys. - * ``variance`` - Square of the uncertainty (no units). - * ``isconstant``: Map corresponding to new keys whose values are``True`` - if constant or ``False`` if periodic. These keys must be a subset of - the new data keys. - * ``timeseries``: Name of corresponding time series data, ``None`` if no - time series. 
_EG_: DNI data ``timeseries`` attribute might be set to a - date/time data that it corresponds to. More than one data can have the - same ``timeseries`` data. - * ``data_source``: the - :class:`~carousel.core.data_sources.DataSource` superclass that - was used to acquire this data. This can be used to group data from a - specific source together. - - :param newdata: New data to add to registry. When registering new data, - keys are not allowed to override existing keys in the data - registry. - :type newdata: mapping - :raises: - :exc:`~carousel.core.exceptions.UncertaintyPercentUnitsError` - """ - kwargs.update(zip(self.meta_names, args)) - # check uncertainty has units of percent - uncertainty = kwargs['uncertainty'] - variance = kwargs['variance'] - isconstant = kwargs['isconstant'] - # check uncertainty is percent - if uncertainty: - for k0, d in uncertainty.iteritems(): - for k1, v01 in d.iteritems(): - units = v01.units - if units != UREG('percent'): - keys = '%s-%s' % (k0, k1) - raise UncertaintyPercentUnitsError(keys, units) - # check variance is square of uncertainty - if variance and uncertainty: - for k0, d in variance.iteritems(): - for k1, v01 in d.iteritems(): - keys = '%s-%s' % (k0, k1) - missing = k1 not in uncertainty[k0] - v2 = np.asarray(uncertainty[k0][k1].to('fraction').m) ** 2.0 - if missing or not np.allclose(np.asarray(v01), v2): - raise UncertaintyVarianceError(keys, v01) - # check that isconstant is boolean - if isconstant: - for k, v in isconstant.iteritems(): - if not isinstance(v, bool): - classname = self.__class__.__name__ - error_msg = ['%s meta "isconstant" should be' % classname, - 'boolean, but it was "%s" for "%s".' % (v, k)] - raise TypeError(' '.join(error_msg)) - # call super method, meta must be passed as kwargs! - super(DataRegistry, self).register(newdata, **kwargs) - - -class DataSourceBase(CommonBase): - """ - Base data source meta class. - """ - _path_attr = 'data_path' - _file_attr = 'data_file' - _param_cls = DataParameter - _reader_attr = 'data_reader' - _enable_cache_attr = 'data_cache_enabled' - _attr_default = {_reader_attr: JSONReader, _enable_cache_attr: True} - - def __new__(mcs, name, bases, attr): - # use only with DataSource subclasses - if not CommonBase.get_parents(bases, DataSourceBase): - return super(DataSourceBase, mcs).__new__(mcs, name, bases, attr) - # set _meta combined from bases - attr = mcs.set_meta(bases, attr) - # set default meta attributes - meta = attr[mcs._meta_attr] - for ma, dflt in mcs._attr_default.iteritems(): - a = getattr(meta, ma, None) - if a is None: - setattr(meta, ma, dflt) - # set param file full path if data source path and file specified or - # try to set parameters from class attributes except private/magic - attr = mcs.set_param_file_or_parameters(attr) - return super(DataSourceBase, mcs).__new__(mcs, name, bases, attr) - - -class DataSource(object): - """ - Required interface for all Carousel data sources such as PVSim results, - TMY3 data and calculation input files. - - Each data source must specify a ``data_reader`` which must subclass - :class:`~carousel.core.data_readers.DataReader` and that can read this - data source. The default is - :class:`~carousel.core.data_readers.JSONReader`. - - Each data source must also specify a ``data_file`` and ``data_path`` that - contains the parameters required to import data from the data source using - the data reader. Each data reader had different parameters to specify how - it reads the data source, so consult the API. 
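    For example, a hedged registration with percent uncertainty and the
    matching variance demanded by the checks in ``DataRegistry.register``
    above (the data values are invented)::

        reg = DataRegistry()
        reg.register({'GHI': 834.0 * UREG('W/m**2')},
                     uncertainty={'GHI': {'GHI': 1.0 * UREG('percent')}},
                     variance={'GHI': {'GHI': 0.0001}},  # (0.01 fraction) ** 2
                     isconstant={'GHI': False})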
- - This is the required interface for all source files containing data used in - Carousel. - """ - __metaclass__ = DataSourceBase - - def __init__(self, *args, **kwargs): - # save arguments, might need them later - self.args = args #: positional arguments - self.kwargs = kwargs #: keyword arguments - # make pycharm by defining inferred objects - meta = getattr(self, DataSourceBase._meta_attr) - parameters = getattr(self, DataSourceBase._param_attr) - # check if the data reader is a file reader - filename = None - if meta.data_reader.is_file_reader: - # get filename from args or kwargs - if args: - filename = args[0] - elif kwargs: - filename = kwargs.get('filename') - # raises KeyError: 'filename' if filename isn't given - # TODO: allow user to set explicit filename for cache - #: filename of file containing data - self.filename = filename - # private property - self._is_saved = True - # If filename ends with ".json", then either the original reader was - # a JSONReader or the data was cached. - # If data caching enabled and file doesn't end with ".json", cache it as - # JSON, append ".json" to the original filename and pass original data - # reader as extra argument. - if meta.data_cache_enabled and self._is_cached(): - # switch reader to JSONReader, with old reader as extra arg - data_reader_instance = JSONReader(parameters, meta) - else: - # create the data reader object specified using parameter map - data_reader_instance = meta.data_reader(parameters, meta) - #: data loaded from reader - self.data = data_reader_instance.load_data(*args, **kwargs) - # save JSON file if doesn't exist already. JSONReader checks utc mod - # time vs orig file, and deletes JSON file if orig file is newer. - if meta.data_cache_enabled and not self._is_cached(): - self.saveas_json(self.filename) # ".json" appended by saveas_json - # XXX: default values of uncertainty, isconstant and timeseries are - # empty dictionaries. - #: data uncertainty in percent - self.uncertainty = {} - #: variance - self.variance = {} - #: ``True`` if data is constant for all dynamic calculations - self.isconstant = {} - #: name of corresponding time series data, ``None`` if no time series - self.timeseries = {} - #: name of :class:`DataSource` - self.data_source = dict.fromkeys(self.data, self.__class__.__name__) - # TODO: need a consistent way to handle uncertainty, isconstant and time - # series - # XXX: Each superclass should do the following: - # * prepare the raw data from reader for the registry. Some examples of - # data preparation are combining numbers and units and uncertainties, - # data validation, combining years, months, days and hours into - # datetime objects and parsing data from strings. - # * handle uncertainty, isconstant, timeseries and any other meta data. - self._raw_data = copy(self.data) # shallow copy of data - self.__prepare_data__() # prepare data for registry - # calculate variances - for k0, d in self.uncertainty.iteritems(): - for k1, v01 in d.iteritems(): - self.variance[k0] = {k1: v01.to('fraction').m ** 2.0} - - def __prepare_data__(self): - """ - Prepare raw data from reader for the registry. Some examples of data - preparation are combining numbers and units and uncertainties, data - validation, combining years, months, days and hours into datetime - objects and parsing data from strings. - - Each data superclass should implement this method. If there is no data - preparation then use ``pass``. - """ - raise NotImplementedError('Data preparation not implemented. 
' +
-                                  'Use ``pass`` if not required.')
-
-    def _is_cached(self, ext='.json'):
-        """
-        Determine if ``filename`` is cached using extension ``ext``, a string.
-
-        :param ext: extension used to cache ``filename``, default is '.json'
-        :type ext: str
-        :return: True if ``filename`` is cached using extension ``ext``
-        :rtype: bool
-        """
-        # extension must start with a dot
-        if not ext.startswith('.'):
-            # prepend extension with a dot
-            ext = '.%s' % ext
-        # cache file is filename with extension
-        cache_file = '%s%s' % (self.filename, ext)
-        # if filename already ends with extension or there's a file with the
-        # extension, then assume the data is cached
-        return self.filename.endswith(ext) or os.path.exists(cache_file)
-
-    @property
-    def issaved(self):
-        return self._is_saved
-
-    def saveas_json(self, save_name):
-        """
-        Save :attr:`data`, :attr:`param_file`, original :attr:`data_reader`
-        and UTC modification time as keys in JSON file. If data is edited then
-        it should be saved using this method. Non-JSON data files are also
-        saved using this method.
-
-        :param save_name: Name to save JSON file as, ".json" is appended.
-        :type save_name: str
-        """
-        # make pycharm happy by defining inferred objects
-        meta = getattr(self, DataSourceBase._meta_attr)
-        param_file = getattr(self, DataSourceBase._param_file)
-        # JSONEncoder removes units and converts arrays to lists
-        # save last time file was modified
-        utc_mod_time = list(time.gmtime(os.path.getmtime(save_name)))
-        json_data = {'data': self.data, 'utc_mod_time': utc_mod_time,
-                     'param_file': param_file,
-                     'data_reader': meta.data_reader.__name__,
-                     'data_source': self.__class__.__name__}
-        if not save_name.endswith('.json'):
-            save_name += '.json'
-        with open(save_name, 'w') as fp:
-            json.dump(json_data, fp, cls=CarouselJSONEncoder)
-        # TODO: test file save successful
-        # TODO: need to update model
-        self._is_saved = True
-
-    def edit(self, edits, data_reg):
-        """
-        Edit data in :class:`DataSource`. Sets :attr:`issaved` to ``False``.
-        """
-        data_reg.update(edits)
-        self._is_saved = False
-
-    def __getitem__(self, item):
-        return self.data[item]
-
-    def __repr__(self):
-        parameters = getattr(self, DataSourceBase._param_attr)
-        fmt = ('<%s(' % self.__class__.__name__)
-        fmt += ', '.join('%s=%r' % (k, v) for k, v in parameters.iteritems())
-        fmt += ')>'
-        return fmt
diff --git a/carousel/core/formulas.py.bak b/carousel/core/formulas.py.bak
deleted file mode 100644
index d9b5f0a..0000000
--- a/carousel/core/formulas.py.bak
+++ /dev/null
@@ -1,300 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-This module provides the framework for formulas. All formulas should inherit
-from the Formula class in this module. Formula sources must include a
-formula importer, or can subclass one of the formula importers here.
-"""
-
-from carousel.core import logging, CommonBase, Registry, UREG, Parameter
-import imp
-import importlib
-import os
-import sys
-import numexpr as ne
-import inspect
-from uncertainty_wrapper import unc_wrapper_args
-
-LOGGER = logging.getLogger(__name__)
-
-
-class FormulaParameter(Parameter):
-    """
-    Field for formula parameters.
-    """
-    _attrs = ['islinear', 'args', 'units', 'isconstant']
-
-
-class FormulaRegistry(Registry):
-    """
-    A registry for formulas. The meta names are ``islinear``, ``args``,
-    ``units`` and ``isconstant``.
-    """
-    meta_names = ['islinear', 'args', 'units', 'isconstant']
-
-    def register(self, new_formulas, *args, **kwargs):
-        """
-        Register formulas and meta data.
- - * ``islinear`` - ``True`` if formula is linear, ``False`` if non-linear. - * ``args`` - position of arguments - * ``units`` - units of returns and arguments as pair of tuples - * ``isconstant`` - constant arguments not included in covariance - - :param new_formulas: new formulas to add to registry. - """ - kwargs.update(zip(self.meta_names, args)) - # call super method, meta must be passed as kwargs! - super(FormulaRegistry, self).register(new_formulas, **kwargs) - - -class FormulaImporter(object): - """ - A class that imports formulas. - - :param parameters: Parameters used to import formulas. - :type parameters: dict - :param meta: Options for formulas and formula inporters - :type meta: Meta - """ - def __init__(self, parameters, meta=None): - #: parameters to be read by reader - self.parameters = parameters - #: options for importer - self.meta = meta - - def import_formulas(self): - """ - This method must be implemented by each formula importer. - - :returns: formulas - :rtype: dict - :raises: :exc:`~exceptions.NotImplementedError` - """ - raise NotImplementedError(' '.join(['Function "import_formulas" is', - 'not implemented.'])) - - -class PyModuleImporter(FormulaImporter): - """ - Import formulas from a Python module. - """ - def import_formulas(self): - """ - Import formulas specified in :attr:`parameters`. - - :returns: formulas - :rtype: dict - """ - # TODO: unit tests! - # TODO: move this to somewhere else and call it "importy", maybe - # core.__init__.py since a lot of modules might use it. - module = self.meta.module # module read from parameters - package = getattr(self.meta, 'package', None) # package read from meta - name = package + module if package else module # concat pkg + name - path = getattr(self.meta, 'path', None) # path read from parameters - # import module using module and package - mod = None - # SEE ALSO: http://docs.python.org/2/library/imp.html#examples - try: - # fast path: see if module was already imported - mod = sys.modules[name] - except KeyError: - try: - # import module specified in parameters - mod = importlib.import_module(module, package) - except ImportError as err: - if not path: - msg = ('%s could not be imported either because it was not ' - 'on the PYTHONPATH or path was not given.') - LOGGER.exception(msg, name) - raise err - else: - # import module using path - # expand ~, environmental variables and make path absolute - if not os.path.isabs(path): - path = os.path.expanduser(os.path.expandvars(path)) - path = os.path.abspath(path) - # paths must be a list - paths = [path] - # imp does not find hierarchical module names, find and load - # packages recursively, then load module, see last paragraph - # https://docs.python.org/2/library/imp.html#imp.find_module - pname = '' # full dotted name of package to load - # traverse namespace - while name: - # if dot in name get first package - if '.' 
in name: - pkg, name = name.split('.', 1) - else: - pkg, name = name, None # pkg is the module - # Find package or module by name and path - fp, filename, desc = imp.find_module(pkg, paths) - # full dotted name of package to load - pname = pkg if not pname else '%s.%s' % (pname, pkg) - LOGGER.debug('package name: %s', pname) - # try to load the package or module - try: - mod = imp.load_module(pname, fp, filename, desc) - finally: - if fp: - fp.close() - # append package paths for imp.find_module - if name: - paths = mod.__path__ - formulas = {} # an empty list of formulas - formula_param = self.parameters # formulas key - # FYI: iterating over dictionary is equivalent to iterkeys() - if isinstance(formula_param, (list, tuple, dict)): - # iterate through formulas - for f in formula_param: - formulas[f] = getattr(mod, f) - elif isinstance(formula_param, basestring): - # only one formula - # FYI: use basestring to test for str and unicode - # SEE: http://docs.python.org/2/library/functions.html#basestring - formulas[formula_param] = getattr(mod, formula_param) - else: - # autodetect formulas assuming first letter is f - formulas = {f: getattr(mod, f) for f in dir(mod) if f[:2] == 'f_'} - if not len(formulas): - for f in dir(mod): - mod_attr = getattr(mod, f) - if inspect.isfunction(mod_attr): - formulas[f] = mod_attr - return formulas - - -class NumericalExpressionImporter(FormulaImporter): - """ - Import formulas from numerical expressions using Python Numexpr. - """ - def import_formulas(self): - formulas = {} # an empty list of formulas - formula_param = self.parameters # formulas key - for f, p in formula_param.iteritems(): - formulas[f] = lambda *args: ne.evaluate( - p['extras']['expression'], - {k: a for k, a in zip(p['args'], args)}, {} - ).reshape(1, -1) - LOGGER.debug('formulas %s = %r', f, formulas[f]) - return formulas - - -class FormulaBase(CommonBase): - """ - Metaclass for formulas. - """ - _path_attr = 'formulas_path' - _file_attr = 'formulas_file' - _param_cls = FormulaParameter - - def __new__(mcs, name, bases, attr): - # use only with Formula subclasses - if not CommonBase.get_parents(bases, FormulaBase): - return super(FormulaBase, mcs).__new__(mcs, name, bases, attr) - # set _meta combined from bases - attr = mcs.set_meta(bases, attr) - # set param file full path if formulas path and file specified or - # try to set parameters from class attributes except private/magic - attr = mcs.set_param_file_or_parameters(attr) - return super(FormulaBase, mcs).__new__(mcs, name, bases, attr) - - -class Formula(object): - """ - A class for formulas. - - Specify ``formula_importer`` which must subclass :class:`FormulaImporter` - to import formula source files as class. If no ``formula_importer`` is - specified, the default is - :class:`~carousel.core.formulas.PyModuleImporter`. - - Specify ``formula_path`` and ``formula_file`` that contains formulas in - string form or parameters used to import the formula source file. - - This is the required interface for all source files containing formulas - used in Carousel. 
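``PyModuleImporter`` above depends on the ``imp`` module, which is deprecated throughout Python 3 and removed in 3.12. A sketch of the equivalent load-from-path using ``importlib.util``; the module name and path in the usage line are hypothetical::

    import importlib.util

    def load_module_from_path(name, path):
        # Python 3 replacement for the imp.find_module/imp.load_module dance
        spec = importlib.util.spec_from_file_location(name, path)
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)
        return mod

    # mod = load_module_from_path('utils', 'path/to/utils.py')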
- """ - __metaclass__ = FormulaBase - - def __init__(self): - # check for path listed in param file - path = getattr(self._meta, 'path', None) - if path is None: - proxy_file = self.param_file if self.param_file else __file__ - # use the same path as the param file or this file if no param file - self._meta.path = os.path.dirname(proxy_file) - - # check for path listed in param file - formula_importer = getattr(self._meta, 'formula_importer', None) - if formula_importer is None: - #: formula importer class, default is ``PyModuleImporter`` - self._meta.formula_importer = PyModuleImporter - - meta = getattr(self, '_meta', None) # options for formulas - importer_instance = self._meta.formula_importer(self.parameters, meta) - #: formulas loaded by the importer using specified parameters - self.formulas = importer_instance.import_formulas() - #: linearity determined by each data source? - self.islinear = {} - #: positional arguments - self.args = {} - #: expected units of returns and arguments as pair of tuples - self.units = {} - #: constant arguments that are not included in covariance calculation - self.isconstant = {} - # sequence of formulas, don't propagate uncertainty or units - for f in self.formulas: - self.islinear[f] = True - self.args[f] = inspect.getargspec(self.formulas[f]).args - formula_param = self.parameters # formulas key - # if formulas is a list or if it can't be iterated as a dictionary - # then log warning and return - try: - formula_param_generator = formula_param.iteritems() - except AttributeError as err: - LOGGER.warning('Attribute Error: %s', err.message) - return - # formula dictionary - for k, v in formula_param_generator: - if not v: - # skip formula if attributes are null or empty - continue - # get islinear formula attribute - is_linear = v.get('islinear') - if is_linear is not None: - self.islinear[k] = is_linear - # get positional arguments - f_args = v.get('args') - if f_args is not None: - self.args[k] = f_args - # get constant arguments to exclude from covariance - self.isconstant[k] = v.get('isconstant') - if self.isconstant[k] is not None: - argn = [n for n, a in enumerate(self.args[k]) if a not in - self.isconstant[k]] - LOGGER.debug('%s arg nums: %r', k, argn) - self.formulas[k] = unc_wrapper_args(*argn)(self.formulas[k]) - # get units of returns and arguments - self.units[k] = v.get('units') - if self.units[k] is not None: - # append units for covariance and Jacobian if all args - # constant and more than one return output - if self.isconstant[k] is not None: - # check if retval units is a string or None before adding - # extra units for Jacobian and covariance - ret_units = self.units[k][0] - if isinstance(ret_units, basestring) or ret_units is None: - self.units[k][0] = [ret_units] - try: - self.units[k][0] += [None, None] - except TypeError: - self.units[k][0] += (None, None) - # wrap function with Pint's unit wrapper - self.formulas[k] = UREG.wraps(*self.units[k])( - self.formulas[k] - ) - - def __getitem__(self, item): - return self.formulas[item] diff --git a/carousel/core/layers.py.bak b/carousel/core/layers.py.bak deleted file mode 100644 index cd83432..0000000 --- a/carousel/core/layers.py.bak +++ /dev/null @@ -1,408 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This is the Layers module. There are five layers in a Carousel model: - -* Data -* Formulas -* Calculations -* Outputs -* Simulations - -Layers are used to assemble the model. 
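The ``__metaclass__`` attribute used by ``Formula`` above (and the other layer sources) is a Python 2 idiom that Python 3 silently ignores; the Python 3 spelling is a keyword in the class statement. A minimal sketch with a toy metaclass::

    class Meta(type):
        def __new__(mcs, name, bases, attr):
            attr.setdefault('registered', True)  # toy metaclass behavior
            return super().__new__(mcs, name, bases, attr)

    class Source(metaclass=Meta):  # Python 2 spelled this __metaclass__ = Meta
        pass

    assert Source.registered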
For example, the data layer assembles -all of the :ref:`data-sources`, calling the :ref:`data-readers` and putting all -of the data (and meta) into the -:class:`~carousel.core.data_sources.DataRegistry`. - -In general all model layers have add, open and -:meth:`~carousel.core.layers.Layer.load` methods. The add method adds -a particular format such as a -:class:`~carousel.core.data_sources.DataSource`. The open method gets -data from a file in the format that was added. The -:meth:`~carousel.core.layers.Layer.load` method loads the layer into -the model. The :meth:`~carousel.core.layers.Layer.load` method must -be implemented in each subclass of -:class:`~carousel.core.layers.Layer` or -:exc:`~exceptions.NotImplementedError` is raised. -""" - -import importlib -import os -from carousel.core import logging, warnings -from carousel.core.simulations import SimRegistry, Simulation -from carousel.core.data_sources import DataRegistry, DataSource -from carousel.core.formulas import FormulaRegistry, Formula -from carousel.core.calculations import CalcRegistry, Calc -from carousel.core.outputs import OutputRegistry, Output - -LOGGER = logging.getLogger(__name__) -SIMFILE_LOAD_WARNING = ' '.join([ - 'Use of "filename" or "path" in model for simulation is deprecated.', - 'This will raise an exception in the future.' -]) - - -class Layer(object): - """ - A layer in the model. - - :param sources: Dictionary of model parameters specific to this layer. - :type sources: dict - """ - reg_cls = NotImplemented #: registry class - src_cls = NotImplemented #: source class - - def __init__(self, sources=None): - #: dictionary of layer sources - self.layer = sources - #: dictionary of layer source classes added to the layer - self.sources = {} - #: dictionary of source class instances added to the layer - self.objects = {} - #: registry of items contained in this layer - self.reg = self.reg_cls() - - def add(self, src_cls, module, package=None): - """ - Add layer class to model. This method may be overloaded by layer. - - :param src_cls: layer class to add, should not start with underscores - :type src_cls: str - :param module: Python module that contains layer class - :type module: str - :param package: optional package containing module with layer class - :type package: str - :raises: :exc:`~exceptions.NotImplementedError` - """ - # import module containing the layer class - mod = importlib.import_module(module, package) - # get layer class definition from the module - self.sources[src_cls] = getattr(mod, src_cls) - - def load(self, relpath=None): - """ - Load the layer from the model data. This method must be implemented by - each layer. - - :param relpath: alternate path if specified path is missing or ``None`` - :raises: :exc:`~exceptions.NotImplementedError` - """ - raise NotImplementedError('load') - - def delete(self, src_cls): - """ - Delete layer source class from layer. - :param src_cls: layer source class to delete. - :raises: :exc:`~exceptions.NotImplementedError` - """ - raise NotImplementedError('delete') - - def edit(self, src_cls, value): - """ - Edit layer source class with value. - - :param src_cls: layer source class to edit - :type src_cls: str - :param value: new value of layer source class - :raises: :exc:`~exceptions.NotImplementedError` - """ - raise NotImplementedError('delete') - - -class Data(Layer): - """ - The Data layer of the model. 
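``Layer.add`` above resolves a source class by importing its module and pulling the class off it by name. The same pattern in Python 3, demonstrated with a standard-library class so the sketch runs anywhere::

    import importlib

    def load_source_class(src_cls, module, package=None):
        # import the module, then look up the class definition by name
        mod = importlib.import_module(module, package)
        return getattr(mod, src_cls)

    OrderedDict = load_source_class('OrderedDict', 'collections')
    print(OrderedDict(a=1))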
- - The :attr:`~Layer.layer` attribute is a dictionary of data sources names - as keys of dictionaries for each data source with the module and optionally - the package containing the module, the filename, which can be ``None``, - containing specific data for the data source and an optional path to the - data file. If the path is ``None``, then the default path for data internal - to Carousel is used. External data files should specify the path. - """ - reg_cls = DataRegistry #: data layer registry - src_cls = DataSource #: data layer source - - def add(self, data_source, module, package=None): - """ - Add data_source to model. Tries to import module, then looks for data - source class definition. - - :param data_source: Name of data source to add. - :type data_source: str - :param module: Module in which data source resides. Can be absolute or - relative. See :func:`importlib.import_module` - :type module: str - :param package: Optional, but must be used if module is relative. - :type package: str - - .. seealso:: - :func:`importlib.import_module` - """ - super(Data, self).add(data_source, module, package) - # only update layer info if it is missing! - if data_source not in self.layer: - # copy data source parameters to :attr:`Layer.layer` - self.layer[data_source] = {'module': module, 'package': package} - # add a place holder for the data source object when it's constructed - self.objects[data_source] = None - - def open(self, data_source, *args, **kwargs): - """ - Open filename to get data for data_source. - - :param data_source: Data source for which the file contains data. - :type data_source: str - - Positional and keyword arguments can contain either the data to use for - the data source or the full path of the file which contains data for the - data source. - """ - if self.sources[data_source]._meta.data_reader.is_file_reader: - filename = kwargs.get('filename') - path = kwargs.get('path', '') - rel_path = kwargs.get('rel_path', '') - if len(args) > 0: - filename = args[0] - if len(args) > 1: - path = args[1] - if len(args) > 2: - rel_path = args[2] - args = () - kwargs = {'filename': os.path.join(rel_path, path, filename)} - LOGGER.debug('filename: %s', kwargs['filename']) - # call constructor of data source with filename argument - self.objects[data_source] = self.sources[data_source](*args, **kwargs) - # register data and uncertainty in registry - data_src_obj = self.objects[data_source] - meta = [getattr(data_src_obj, m) for m in self.reg.meta_names] - self.reg.register(data_src_obj.data, *meta) - - def load(self, rel_path=None): - """ - Add data_sources to layer and open files with data for the data_source. - """ - for k, v in self.layer.iteritems(): - self.add(k, v['module'], v.get('package')) - filename = v.get('filename') - path = v.get('path') - if filename: - # default path for data is in ../data - if not path: - path = rel_path - else: - path = os.path.join(rel_path, path) - # filename can be a list or a string, concatenate list with - # os.pathsep and append the full path to strings. - if isinstance(filename, basestring): - filename = os.path.join(path, filename) - else: - file_list = [os.path.join(path, f) for f in filename] - filename = os.path.pathsep.join(file_list) - self.open(k, filename) - - def edit(self, data_src, value): - """ - Edit data layer. - - :param data_src: Name of :class:`DataSource` to edit. - :type data_src: str - :param value: Values to edit. 
-        :type value: dict
-        """
-        # check if opening file
-        if 'filename' in value:
-            items = [k for k, v in self.reg.data_source.iteritems() if
-                     v == data_src]
-            self.reg.unregister(items)  # remove items from Registry
-            # open file and register new data
-            self.open(data_src, value['filename'], value.get('path'))
-        self.layer[data_src].update(value)  # update layer with new items
-
-    def delete(self, data_src):
-        """
-        Delete a data source.
-        """
-        items = self.objects[data_src].data.keys()  # items to delete
-        self.reg.unregister(items)  # remove items from Registry
-        self.layer.pop(data_src)  # remove data source from layer
-        self.objects.pop(data_src)  # remove data_source object
-        self.sources.pop(data_src)  # remove data_source class
-
-
-class Formulas(Layer):
-    """
-    Layer containing formulas.
-    """
-    reg_cls = FormulaRegistry  #: formula layer registry
-    src_cls = Formula  #: formula layer source
-
-    def add(self, formula, module, package=None):
-        """
-        Import module (from package) with formulas, import formulas and add
-        them to formula registry.
-
-        :param formula: Name of the formula source to add/open.
-        :param module: Module containing formula source.
-        :param package: [Optional] Package of formula source module.
-
-        .. seealso::
-            :func:`importlib.import_module`
-        """
-        super(Formulas, self).add(formula, module, package)
-        # only update layer info if it is missing!
-        if formula not in self.layer:
-            # copy formula source parameters to :attr:`Layer.layer`
-            self.layer[formula] = {'module': module, 'package': package}
-        self.objects[formula] = self.sources[formula]()
-        # register formula and linearity in registry
-        formula_src_obj = self.objects[formula]
-        meta = [getattr(formula_src_obj, m) for m in self.reg.meta_names]
-        self.reg.register(formula_src_obj.formulas, *meta)
-
-    def open(self, formula, module, package=None):
-        self.add(formula, module, package=package)
-
-    def load(self, _=None):
-        """
-        Add formulas to layer.
-        """
-        for k, v in self.layer.iteritems():
-            self.add(k, v['module'], v.get('package'))
-
-    def edit(self, src_cls, value):
-        pass
-
-    def delete(self, src_cls):
-        pass
-
-
-class Calculations(Layer):
-    """
-    Layer containing calculations.
-    """
-    reg_cls = CalcRegistry  #: calculations layer registry
-    src_cls = Calc  #: calculation layer source
-
-    def add(self, calc, module, package=None):
-        """
-        Add calc to layer.
-        """
-        super(Calculations, self).add(calc, module, package)
-        # only update layer info if it is missing!
-        if calc not in self.layer:
-            # copy calc source parameters to :attr:`Layer.layer`
-            self.layer[calc] = {'module': module, 'package': package}
-        # instantiate the calc object
-        self.objects[calc] = self.sources[calc]()
-        # register calc and dependencies in registry
-        calc_src_obj = self.objects[calc]
-        meta = [getattr(calc_src_obj, m) for m in self.reg.meta_names]
-        self.reg.register(calc_src_obj.calcs, *meta)
-
-    def open(self, calc, module, package=None):
-        self.add(calc, module, package=package)
-
-    def load(self, _=None):
-        """
-        Add calcs to layer.
-        """
-        for k, v in self.layer.iteritems():
-            self.add(k, v['module'], v.get('package'))
-
-    def edit(self, src_cls, value):
-        pass
-
-    def delete(self, src_cls):
-        pass
-
-
-class Outputs(Layer):
-    """
-    Layer containing output sources.
-    """
-    reg_cls = OutputRegistry  #: output layer registry
-    src_cls = Output  #: output layer source
-
-    def add(self, output, module, package=None):
-        """
-        Add output to layer.
-        """
-        super(Outputs, self).add(output, module, package)
-        # only update layer info if it is missing!
- if output not in self.layer: - # copy output source parameters to :attr:`Layer.layer` - self.layer[output] = {'module': module, 'package': package} - # instantiate the output object - self.objects[output] = self.sources[output]() - # register outputs and meta-data in registry - out_src_obj = self.objects[output] - meta = [getattr(out_src_obj, m) for m in self.reg.meta_names] - self.reg.register(out_src_obj.outputs, *meta) - - def open(self, output, module, package=None): - self.add(output, module, package=package) - - def load(self, _=None): - """ - Add output_source to layer. - """ - for k, v in self.layer.iteritems(): - self.add(k, v['module'], v.get('package')) - - def edit(self, src_cls, value): - pass - - def delete(self, src_cls): - pass - - -class Simulations(Layer): - """ - Layer containing simulation sources. - """ - reg_cls = SimRegistry #: simulation layer registry - src_cls = Simulation #: simulation layer source - - def add(self, sim, module, package=None): - """ - Add simulation to layer. - """ - super(Simulations, self).add(sim, module, package) - # only update layer info if it is missing! - if sim not in self.layer: - # copy simulation source parameters to :attr:`Layer.layer` - self.layer[sim] = {'module': module, 'package': package} - - def open(self, sim, filename=None): - # call constructor of sim source with filename argument - self.objects[sim] = self.sources[sim](filename) - # register simulations in registry, the only reason to register an item - # is make sure it doesn't overwrite other items - sim_src_obj = self.objects[sim] - meta = [{str(sim): getattr(sim_src_obj, m)} for m in - self.reg.meta_names] - self.reg.register({sim: sim_src_obj}, *meta) - - def load(self, rel_path=None): - """ - Add sim_src to layer. - """ - for k, v in self.layer.iteritems(): - self.add(k, v['module'], v.get('package')) - filename = v.get('filename') - path = v.get('path') - if filename: - warnings.warn(DeprecationWarning(SIMFILE_LOAD_WARNING)) - # default path for data is in ../simulations - if not path: - path = rel_path - else: - path = os.path.join(rel_path, path) - filename = os.path.join(path, filename) - self.open(k, filename) - - def edit(self, src_cls, value): - pass - - def delete(self, src_cls): - pass diff --git a/carousel/core/models.py.bak b/carousel/core/models.py.bak deleted file mode 100644 index 1d1008f..0000000 --- a/carousel/core/models.py.bak +++ /dev/null @@ -1,344 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This is the Carousel :mod:`~carousel.core.models` module that contains -definitions for the :class:`~carousel.core.models.Model` class. - -The Carousel model contains five layers: -:class:`~carousel.core.layers.Data`, -:class:`~carousel.core.layers.Formulas`, -:class:`~carousel.core.layers.Calculations`, -:class:`~carousel.core.layers.Outputs` and -:class:`~carousel.core.layers.Simulations`. The -:class:`~carousel.core.layers.Data` layer organizes -:ref:`data-sources` by providing methods to add and load data for Carousel. -The :class:`~carousel.core.layers.Formulas` layer loads -:ref:`formulas` used by :class:`~carousel.core.layers.Calculations` -calculations. The :class:`~carousel.core.layers.Outputs` layer -organizes the calculated outputs for use in other calculations. Finally the -:class:`~carousel.core.layers.Simulations` layer organizes -options such as how long the simulation should run and takes care of actually -running the simulation. 
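``Data.load`` above tests file names with ``basestring``, which does not exist in Python 3; ``str`` covers the same cases there. A sketch of the join logic with that substitution and toy file names::

    import os

    def join_files(path, filename):
        # filename may be a single name or a list of names
        if isinstance(filename, str):  # Python 2 used basestring here
            return os.path.join(path, filename)
        return os.pathsep.join(os.path.join(path, f) for f in filename)

    print(join_files('data', ['met_data.xlsx', 'tmy3.csv']))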
-""" - -import importlib -import json -import os -import copy -from carousel.core import logging, _listify, CommonBase, Parameter - -LOGGER = logging.getLogger(__name__) -LAYERS_MOD = '.layers' -LAYERS_PKG = 'carousel.core' -LAYER_CLS_NAMES = {'data': 'Data', 'calculations': 'Calculations', - 'formulas': 'Formulas', 'outputs': 'Outputs', - 'simulations': 'Simulations'} - - -class ModelParameter(Parameter): - _attrs = ['layer', 'module', 'package', 'path', 'sources'] - - -class ModelBase(CommonBase): - """ - Base model meta class. If model has class attributes "modelpath" and - "modelfile" then layer class names and model configuration will be read from - the file on that path. Otherwise layer class names will be read from the - class attributes. - """ - _path_attr = 'modelpath' - _file_attr = 'modelfile' - _param_cls = ModelParameter - _layers_cls_attr = 'layer_cls_names' - _layers_mod_attr = 'layers_mod' - _layers_pkg_attr = 'layers_pkg' - _cmd_layer_attr = 'cmd_layer_name' - _attr_default = { - _layers_cls_attr: LAYER_CLS_NAMES, _layers_mod_attr: LAYERS_MOD, - _layers_pkg_attr: LAYERS_PKG, _cmd_layer_attr: 'simulations' - } - - def __new__(mcs, name, bases, attr): - # use only with Model subclasses - if not CommonBase.get_parents(bases, ModelBase): - return super(ModelBase, mcs).__new__(mcs, name, bases, attr) - attr = mcs.set_meta(bases, attr) - # set param file full path if data source path and file specified or - # try to set parameters from class attributes except private/magic - attr = mcs.set_param_file_or_parameters(attr) - # set default meta attributes - meta = attr[mcs._meta_attr] - for ma, dflt in mcs._attr_default.iteritems(): - a = getattr(meta, ma, None) - if a is None: - setattr(meta, ma, dflt) - return super(ModelBase, mcs).__new__(mcs, name, bases, attr) - - -class Model(object): - """ - A class for models. Carousel is a subclass of the :class:`Model` class. - - :param modelfile: The name of the JSON file with model data. - :type modelfile: str - """ - __metaclass__ = ModelBase - - def __init__(self, modelfile=None): - meta = getattr(self, ModelBase._meta_attr) - parameters = getattr(self, ModelBase._param_attr) - # load modelfile if it's an argument - if modelfile is not None: - #: model file - self.param_file = os.path.abspath(modelfile) - LOGGER.debug('modelfile: %s', modelfile) - else: - modelfile = self.param_file - # check meta class for model if declared inline - if parameters: - # TODO: separate model and parameters according to comments in #78 - #: dictionary of the model - self.model = model = copy.deepcopy(parameters) - else: - #: dictionary of the model - self.model = model = None - # layer attributes initialized in meta class or _initialize() - # for k, v in layer_cls_names.iteritems(): - # setattr(self, k, v) - # XXX: this seems bad to initialize attributes outside of constructor - #: dictionary of model layer classes - self.layers = {} - #: state of model, initialized or uninitialized - self._state = 'uninitialized' - # need either model file or model and layer class names to initialize - ready_to_initialize = ((modelfile is not None or model is not None) and - meta.layer_cls_names is not None) - if ready_to_initialize: - self._initialize() # initialize using modelfile or model - - @property - def state(self): - """ - current state of the model - """ - return self._state - - def _load(self, layer=None): - """ - Load or update all or part of :attr:`model`. - - :param layer: Optionally load only specified layer. 
- :type layer: str - """ - # open model file for reading and convert JSON object to dictionary - # read and load JSON parameter map file as "parameters" - with open(self.param_file, 'r') as param_file: - file_params = json.load(param_file) - for layer, params in file_params.iteritems(): - # update parameters from file - self.parameters[layer] = ModelParameter(**params) - # if layer argument spec'd then only update/load spec'd layer - if not layer or not self.model: - # update/load model if layer not spec'd or if no model exists yet - # TODO: separate model and parameters according to comments in #78 - self.model = copy.deepcopy(self.parameters) - else: - # convert non-sequence to tuple - layers = _listify(layer) - # update/load layers - for layer in layers: - self.model[layer] = copy.deepcopy(self.parameters[layer]) - - def _update(self, layer=None): - """ - Update layers in model. - """ - meta = getattr(self, ModelBase._meta_attr) - if not layer: - layers = self.layers - else: - # convert non-sequence to tuple - layers = _listify(layer) - for layer in layers: - # relative path to layer files from model file - path = os.path.abspath(os.path.join(meta.modelpath, layer)) - getattr(self, layer).load(path) - - def _initialize(self): - """ - Initialize model and layers. - """ - meta = getattr(self, ModelBase._meta_attr) - # read modelfile, convert JSON and load/update model - if self.param_file is not None: - self._load() - LOGGER.debug('model:\n%r', self.model) - # initialize layers - # FIXME: move import inside loop for custom layers in different modules - mod = importlib.import_module(meta.layers_mod, meta.layers_pkg) - src_model = {} - for layer, value in self.model.iteritems(): - # from layers module get the layer's class definition - layer_cls = getattr(mod, meta.layer_cls_names[layer]) # class def - self.layers[layer] = layer_cls # add layer class def to model - # check if model layers are classes - src_value = {} # layer value generated from source classes - for src in value['sources']: - # check if source has keyword arguments - try: - src, kwargs = src - except (TypeError, ValueError): - kwargs = {} # no key work arguments - # skip if not a source class - if isinstance(src, basestring): - continue - # generate layer value from source class - src_value[src.__name__] = {'module': src.__module__, - 'package': None} - # update layer keyword arguments - src_value[src.__name__].update(kwargs) - # use layer values generated from source class - if src_value: - value = src_model[layer] = src_value - else: - srcmod, srcpkg = value.get('module'), value.get('package') - try: - value = dict(value['sources']) - except ValueError: - value = dict.fromkeys(value['sources'], {}) - for src in value.viewkeys(): - if srcmod is not None: - value[src]['module'] = srcmod - if srcpkg is not None: - value[src]['package'] = srcpkg - # set layer attribute with model data - setattr(self, layer, layer_cls(value)) - # update model with layer values generated from source classes - if src_model: - self.model.update(src_model) - self._update() - self._state = 'initialized' - - def load(self, modelfile, layer=None): - """ - Load or update a model or layers in a model. - - :param modelfile: The name of the json file to load. - :type modelfile: str - :param layer: Optionally load only specified layer. - :type layer: str - """ - # read modelfile, convert JSON and load/update model - self.param_file = modelfile - self._load(layer) - self._update(layer) - - def edit(self, layer, item, delete=False): - """ - Edit model. 
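``Model._initialize`` above iterates ``value.viewkeys()`` while assigning into the values; in Python 3, ``keys()`` already returns a view, so ``viewkeys()`` simply disappears. A minimal sketch with hypothetical source names::

    value = {'PVPowerData': {}, 'PVPowerLosses': {}}  # hypothetical sources
    srcmod = 'pvpower.sandia_performance_model'
    for src in value.keys():  # Python 2 used value.viewkeys()
        value[src]['module'] = srcmod
    print(value)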
- - :param layer: Layer of model to edit - :type layer: str - :param item: Items to edit. - :type item: dict - :param delete: Flag to return - :class:`~carousel.core.layers.Layer` to delete item. - :type delete: bool - """ - # get layer attribute with model data - if hasattr(self, layer): - layer_obj = getattr(self, layer) - else: - raise AttributeError('missing layer: %s', layer) - if delete: - return layer_obj - # iterate over items and edit layer - for k, v in item.iteritems(): - if k in layer_obj.layer: - layer_obj.edit(k, v) # edit layer - else: - raise AttributeError('missing layer item: %s', k) - # update model data - if k in self.model[layer]: - self.model[layer][k].update(v) - else: - raise AttributeError('missing model layer item: %s', k) - - def add(self, layer, items): - """ - Add items in model. - """ - for k in items.iterkeys(): - if k in self.model[layer]: - raise Exception('item %s is already in layer %s' % (k, layer)) - self.model[layer].update(items) - # this should also update Layer.layer, the layer data - # same as calling layer constructor - # so now just need to add items to the layer - for k, v in items.iteritems(): - getattr(self, layer).add(k, v['module'], v.get('package')) - - def delete(self, layer, items): - """ - Delete items in model. - """ - # Use edit to get the layer obj containing item - items = _listify(items) # make items a list if it's not - layer_obj = self.edit(layer, dict.fromkeys(items), delete=True) - for k in items: - if k in layer_obj.layer: - layer_obj.delete(k) - else: - raise AttributeError('item %s missing from layer %s' % - (k, layer)) - # don't need to pop items from self.model, because, self.layer - # points to the same object as the item in model! - # for example: - # (Pdb) id(self.model['data']) # same ID as layer in data - # 125639560L - # (Pdb) id(self.data.layer) # same ID as data in model - # 125639560L - - def save(self, modelfile, layer=None): - """ - Save a model file. - - :param modelfile: The name of the json file to save. - :type modelfile: str - :param layer: Optionally save only specified layer. - :type layer: str - """ - if layer: - obj = {layer: self.model[layer]} - else: - obj = self.model - with open(modelfile, 'w') as fp: - json.dump(obj, fp, indent=2, sort_keys=True) - - @property - def registries(self): - return {layer: getattr(self, layer).reg - for layer in self.layers} - - @property - def cmd_layer(self): - meta = getattr(self, ModelBase._meta_attr) - return getattr(self, meta.cmd_layer_name, NotImplemented) - - @property - def commands(self): - return self.cmd_layer.reg.commands - - def command(self, cmd, progress_hook=None, *args, **kwargs): - """ - Execute a model command. - - :param cmd: Name of the command. - :param progress_hook: A function to which progress updates are passed. - """ - cmds = cmd.split(None, 1) # split commands and simulations - sim_names = cmds[1:] # simulations - if not sim_names: - sim_names = self.cmd_layer.reg.iterkeys() - for sim_name in sim_names: - sim_cmd = getattr(self.cmd_layer.reg[sim_name], cmd) - sim_cmd(self, progress_hook=progress_hook, *args, **kwargs) diff --git a/carousel/core/outputs.py.bak b/carousel/core/outputs.py.bak deleted file mode 100644 index 1288aa1..0000000 --- a/carousel/core/outputs.py.bak +++ /dev/null @@ -1,141 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -This module provides the framework for output from Carousel. It is similar -to the data layer except output sources are always calculations. 
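``Model.command`` above falls back to ``self.cmd_layer.reg.iterkeys()`` when no simulation names are given; the Python 3 idiom is ``list(reg)``. A toy sketch of that dispatch::

    reg = {'sim1': None, 'sim2': None}  # toy simulation registry
    cmds = 'start'.split(None, 1)  # command name plus optional sim names
    sim_names = cmds[1:] or list(reg)  # Python 2 used reg.iterkeys()
    print(cmds[0], sim_names)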
-"""
-
-from carousel.core import logging, CommonBase, UREG, Q_, Registry, Parameter
-import json
-import numpy as np
-
-LOGGER = logging.getLogger(__name__)
-
-
-class OutputParameter(Parameter):
-    """
-    Fields for outputs.
-    """
-    _attrs = ['units', 'init', 'size', 'isconstant', 'isproperty', 'timeseries']
-
-
-class OutputRegistry(Registry):
-    """
-    A registry for output from calculations.
-    """
-    meta_names = [
-        'initial_value', 'size', 'uncertainty', 'variance', 'jacobian',
-        'isconstant', 'isproperty', 'timeseries', 'output_source'
-    ]
-
-    def register(self, new_outputs, *args, **kwargs):
-        """
-        Register outputs and metadata.
-
-        * ``initial_value`` - used in dynamic calculations
-        * ``size`` - number of elements per timestep
-        * ``uncertainty`` - in percent of nominal value
-        * ``variance`` - dictionary of covariances, diagonal is square of
-          uncertainties, no units
-        * ``jacobian`` - dictionary of sensitivities dxi/dfj
-        * ``isconstant`` - ``True`` if constant, ``False`` if periodic
-        * ``isproperty`` - ``True`` if output stays at last value during
-          thresholds, ``False`` if reverts to initial value
-        * ``timeseries`` - name of corresponding time series output, ``None``
-          if no time series
-        * ``output_source`` - name of the :class:`Output` source
-
-        :param new_outputs: new outputs to register.
-        """
-        kwargs.update(zip(self.meta_names, args))
-        # call super method
-        super(OutputRegistry, self).register(new_outputs, **kwargs)
-
-
-class OutputBase(CommonBase):
-    """
-    Metaclass for outputs.
-
-    Setting the ``__metaclass__`` attribute to :class:`OutputBase` adds the
-    full path to the specified output parameter file as ``param_file`` or
-    adds ``parameters`` with outputs specified. Also checks that outputs is a
-    subclass of :class:`Output`. Sets `output_path` and `output_file` as the
-    class attributes that specify the parameter file full path.
-    """
-    _path_attr = 'outputs_path'
-    _file_attr = 'outputs_file'
-    _param_cls = OutputParameter
-
-    def __new__(mcs, name, bases, attr):
-        # use only with Output subclasses
-        if not CommonBase.get_parents(bases, OutputBase):
-            return super(OutputBase, mcs).__new__(mcs, name, bases, attr)
-        # set _meta combined from bases
-        attr = mcs.set_meta(bases, attr)
-        # set param file full path if outputs path and file specified or
-        # try to set parameters from class attributes except private/magic
-        attr = mcs.set_param_file_or_parameters(attr)
-        return super(OutputBase, mcs).__new__(mcs, name, bases, attr)
-
-
-class Output(object):
-    """
-    A class for formatting outputs.
-
-    Do not use this class directly. Instead subclass it in your output model
-    and list the path and file of the outputs parameters or provide the
-    parameters as class members.
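``OutputRegistry.register`` above maps positional meta arguments onto names with ``kwargs.update(zip(self.meta_names, args))``. In Python 3 ``zip`` returns a lazy iterator, but ``dict.update`` consumes it identically, so the pattern survives the conversion unchanged; a toy sketch::

    meta_names = ['initial_value', 'size']
    args = ({'energy': 0}, {'energy': 1})
    kwargs = {}
    kwargs.update(zip(meta_names, args))  # works the same in Python 2 and 3
    print(kwargs)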
-
-    Example of specified output parameter file::
-
-        import os
-
-        PROJ_PATH = os.path.join('project', 'path')  # project path
-
-
-        class PVPowerOutputs(Output):
-            outputs_file = 'pvpower.json'
-            outputs_path = os.path.join(PROJ_PATH, 'outputs')
-
-    Example of specified output parameters::
-
-        class PVPowerOutputs(Output):
-            hourly_energy = {'init': 0, 'units': 'Wh', 'size': 8760}
-            yearly_energy = {'init': 0, 'units': 'kWh'}
-    """
-    __metaclass__ = OutputBase
-
-    def __init__(self):
-        #: outputs initial value
-        self.initial_value = {}
-        #: size of outputs
-        self.size = {}
-        #: outputs uncertainty
-        self.uncertainty = {}
-        #: variance
-        self.variance = {}
-        #: jacobian
-        self.jacobian = {}
-        #: outputs isconstant flag
-        self.isconstant = {}
-        #: outputs isproperty flag
-        self.isproperty = {}
-        #: name of corresponding time series, ``None`` if no time series
-        self.timeseries = {}
-        #: name of :class:`Output` superclass
-        self.output_source = {}
-        #: calculation outputs
-        self.outputs = {}
-        for k, v in self.parameters.iteritems():
-            self.initial_value[k] = v.get('init')  # returns None if missing
-            self.size[k] = v.get('size') or 1  # minimum size is 1
-            self.uncertainty[k] = None  # uncertainty for outputs is calculated
-            self.isconstant[k] = v.get('isconstant', False)  # True or False
-            self.isproperty[k] = v.get('isproperty', False)  # True or False
-            units = str(v.get('units', ''))  # default is non-dimensional
-            # NOTE: np.empty is faster than zeros!
-            self.outputs[k] = Q_(np.zeros((1, self.size[k])), UREG(units))
-            # NOTE: Initial values are assigned and outputs resized when
-            # simulation "start" method is called from the model.
-            self.timeseries[k] = v.get('timeseries')  # None if not time series
-            self.output_source[k] = self.__class__.__name__  # output source
diff --git a/carousel/core/simulations.py.bak b/carousel/core/simulations.py.bak
deleted file mode 100644
index 737e5d2..0000000
--- a/carousel/core/simulations.py.bak
+++ /dev/null
@@ -1,555 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-This is the Simulation module. The Simulation layer takes care of creating
-output variables, writing data to disk, iterating over data and calculations
-at each interval in the simulation and setting any parameters required to
-perform the simulation. It gets all its info from the model, which in turn
-gets it from each layer, which gets info from the layers' sources.
-"""
-
-from carousel.core import logging, CommonBase, Registry, UREG, Q_, Parameter
-from carousel.core.exceptions import CircularDependencyError, MissingDataError
-import json
-import errno
-import os
-import sys
-import numpy as np
-import Queue
-import functools
-from datetime import datetime
-
-LOGGER = logging.getLogger(__name__)
-
-
-def mkdir_p(path):
-    """
-    http://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python
-
-    :param path: path to make recursively
-    """
-    try:
-        os.makedirs(path)
-    except OSError as exc:
-        if exc.errno == errno.EEXIST and os.path.isdir(path):
-            pass
-        else:
-            raise exc
-
-
-def id_maker(obj):
-    """
-    Makes an ID from the object's class name and the datetime now in ISO format.
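``mkdir_p`` above exists because Python 2's ``os.makedirs`` had no way to tolerate an existing directory; on Python 3.2+ the helper reduces to one call. A one-line sketch using the default simulation path from this module::

    import os

    # Python 3 replacement for the EEXIST-checking mkdir_p above
    os.makedirs(os.path.join('Carousel', 'Simulations'), exist_ok=True)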
- - :param obj: the class from which to make the ID - :return: ID - """ - dtfmt = '%Y%m%d-%H%M%S' - return '%s-%s' % (obj.__class__.__name__, datetime.now().strftime(dtfmt)) - - -def sim_progress_hook(format_args, display_header=False): - if isinstance(format_args, basestring): - format_str = '---------- %s ----------\n' - else: - idx = format_args[0] - fields, values = zip(*format_args[1:]) - format_str = '\r%5d' + ' %10.4g' * len(values) - if display_header: - units = (str(v.dimensionality) for v in values) - units = tuple(['n/d' if u == 'dimensionless' else u - for u in units]) - format_args = fields + units + (idx,) + values - format_units = ('units' + ' %10s' * len(units)) + '\n' - fmt_header = ('index' + ' %10s' * len(fields)) + '\n' - format_str = fmt_header + format_units + format_str - else: - format_args = (idx,) + values - sys.stdout.write(format_str % format_args) - - -def topological_sort(dag): - """ - topological sort - - :param dag: directed acyclic graph - :type dag: dict - - .. seealso:: `Topographical Sorting - `_, - `Directed Acyclic Graph (DAG) - `_ - """ - # find all edges of dag - topsort = [node for node, edge in dag.iteritems() if not edge] - # loop through nodes until topologically sorted - while len(topsort) < len(dag): - num_nodes = len(topsort) # number of nodes - # unsorted nodes - for node in dag.viewkeys() - set(topsort): - # nodes with no incoming edges - if set(dag[node]) <= set(topsort): - topsort.append(node) - break - # circular dependencies - if len(topsort) == num_nodes: - raise CircularDependencyError(dag.viewkeys() - set(topsort)) - return topsort - - -class SimParameter(Parameter): - _attrs = ['ID', 'path', 'commands', 'data', 'thresholds', 'interval', - 'sim_length', 'display_frequency', 'display_fields', - 'write_frequency', 'write_fields'] - - -class SimRegistry(Registry): - """ - Registry for simulations. - """ - #: meta names - meta_names = ['commands'] - - def register(self, sim, *args, **kwargs): - """ - register simulation and metadata. - - * ``commands`` - list of methods to callable from model - - :param sim: new simulation - """ - kwargs.update(zip(self.meta_names, args)) - # call super method, now meta can be passed as args or kwargs. - super(SimRegistry, self).register(sim, **kwargs) - - -class SimBase(CommonBase): - """ - Meta class for simulations. - """ - _path_attr = 'sim_path' - _file_attr = 'sim_file' - _attributes = 'attrs' - _deprecated = 'deprecated' - _param_cls = SimParameter - - def __new__(mcs, name, bases, attr): - # use only with Simulation subclasses - if not CommonBase.get_parents(bases, SimBase): - LOGGER.debug('bases:\n%r', bases) - return super(SimBase, mcs).__new__(mcs, name, bases, attr) - # set _meta combined from bases - attr = mcs.set_meta(bases, attr) - # let some attributes in subclasses be override super - attributes = attr.pop(mcs._attributes, None) - deprecated = attr.pop(mcs._deprecated, None) - # set param file full path if simulations path and file specified or - # try to set parameters from class attributes except private/magic - attr = mcs.set_param_file_or_parameters(attr) - # reset subclass attributes - if attributes is not None: - attr[mcs._attributes] = attributes - if deprecated is not None: - attr[mcs._deprecated] = deprecated - LOGGER.debug('attibutes:\n%r', attr) - return super(SimBase, mcs).__new__(mcs, name, bases, attr) - - -class Simulation(object): - """ - A class for simulations. - - :param simfile: Filename of simulation configuration file. 
- :type simfile: str - :param settings: keyword name of simulation parameter to use for settings - :type str: - - Simulation attributes can be passed directly as keyword arguments directly - to :class:`~carousel.core.simulations.Simulation` or in a JSON file or as - class attributes in a subclass or a combination of all 3 methods. - - To get a list of :class:`~carousel.core.simulations.Simulation` attributes - and defaults get the :attr:`~carousel.core.simulations.Simulation.attrs` - attribute. - - Any additional settings provided as keyword arguments will override settings - from file. - """ - __metaclass__ = SimBase - attrs = { - 'ID': None, - 'path': os.path.join('~', 'Carousel', 'Simulations'), - 'commands': ['start', 'pause'], - 'data': None, - 'thresholds': None, - 'interval': 1 * UREG.hour, - 'sim_length': 1 * UREG.year, - 'display_frequency': 1, - 'display_fields': None, - 'write_frequency': 8760, - 'write_fields': None - } - deprecated = { - 'interval': 'interval_length', - 'sim_length': 'simulation_length' - } - - def __init__(self, simfile=None, settings=None, **kwargs): - # load simfile if it's an argument - if simfile is not None: - # read and load JSON parameter map file as "parameters" - self.param_file = simfile - with open(self.param_file, 'r') as param_file: - file_params = json.load(param_file) - #: simulation parameters from file - self.parameters = {settings: SimParameter(**params) for - settings, params in file_params.iteritems()} - # if not subclassed and metaclass skipped, then use kwargs - if not hasattr(self, 'parameters'): - #: parameter file - self.param_file = None - #: simulation parameters from keyword arguments - self.parameters = kwargs - else: - # use first settings - if settings is None: - self.settings, self.parameters = self.parameters.items()[0] - else: - #: name of sim settings used for parameters - self.settings = settings - self.parameters = self.parameters[settings] - # use any keyword arguments instead of parameters - self.parameters.update(kwargs) - # make pycharm happy - attributes assigned in loop by attrs - self.thresholds = {} - self.display_frequency = 0 - self.display_fields = {} - self.write_frequency = 0 - self.write_fields = {} - # pop deprecated attribute names - for k, v in self.deprecated.iteritems(): - val = self.parameters['extras'].pop(v, None) - # update parameters if deprecated attr used and no new attr - if val and k not in self.parameters: - self.parameters[k] = val - # Attributes - for k, v in self.attrs.iteritems(): - setattr(self, k, self.parameters.get(k, v)) - # member docstrings are in documentation since attrs are generated - if self.ID is None: - # generate id from object class name and datetime in ISO format - self.ID = id_maker(self) - if self.path is not None: - # expand environment variables, ~ and make absolute path - self.path = os.path.expandvars(os.path.expanduser(self.path)) - self.path = os.path.abspath(self.path) - # convert simulation interval to Pint Quantity - if isinstance(self.interval, basestring): - self.interval = UREG(self.interval) - elif not isinstance(self.interval, Q_): - self.interval = self.interval[0] * UREG(str(self.interval[1])) - # convert simulation length to Pint Quantity - if isinstance(self.sim_length, basestring): - self.sim_length = UREG(self.sim_length) - elif not isinstance(self.sim_length, Q_): - self.sim_length = self.sim_length[0] * UREG(str(self.sim_length[1])) - # convert simulation length to interval units to calc total intervals - sim_to_interval_units = 
self.sim_length.to(self.interval.units) - #: total number of intervals simulated - self.number_intervals = np.ceil(sim_to_interval_units / self.interval) - #: interval index, start at zero - self.interval_idx = 0 - #: pause status - self._ispaused = False - #: finished status - self._iscomplete = False - #: initialized status - self._isinitialized = False - #: order of calculations - self.calc_order = [] - #: command queue - self.cmd_queue = Queue.Queue() - #: index iterator - self.idx_iter = self.index_iterator() - #: data loaded status - self._is_data_loaded = False - - @property - def ispaused(self): - """ - Pause property, read only. True if paused. - """ - return self._ispaused - - @property - def iscomplete(self): - """ - Completion property, read only. True if finished. - """ - return self._iscomplete - - @property - def isinitialized(self): - """ - Initialization property, read only. True if initialized. - """ - return self._isinitialized - - @property - def is_data_loaded(self): - """ - Data loaded property, read only. True if data loaded. - """ - return self._is_data_loaded - - def check_data(self, data): - """ - Check if data loaded for all sources in data layer. - - :param data: data layer from model - :type data: :class:`~carousel.core.layer.Data` - :return: dictionary of data sources and objects or `None` if not loaded - """ - data_objs = { - data_src: data.objects.get(data_src) for data_src in data.layer - } - self._is_data_loaded = all(data_objs.values()) - return data_objs - - def initialize(self, calc_reg): - """ - Initialize the simulation. Organize calculations by dependency. - - :param calc_reg: Calculation registry. - :type calc_reg: - :class:`~carousel.core.calculation.CalcRegistry` - """ - self._isinitialized = True - # TODO: if calculations are edited, loaded, added, etc. then reset - self.calc_order = topological_sort(calc_reg.dependencies) - - def index_iterator(self): - """ - Generator that resumes from same index, or restarts from sent index. - """ - idx = 0 # index - while idx < self.number_intervals: - new_idx = yield idx - idx += 1 - if new_idx: - idx = new_idx - 1 - - # TODO: change start to run - - def start(self, model, progress_hook=None): - """ - Start the simulation from time zero. - - :param model: Model with layers and registries containing parameters - :type: :class:`~carousel.core.models.Model` - :param progress_hook: A function that receives either a string or a - list containing the index followed by tuples of the data or outputs - names and values specified by ``write_fields`` in the simfile. 
- :type progress_hook: function - - - The model registries should contain the following layer registries: - * :class:`~carousel.core.data_sources.DataRegistry`, - * :class:`~carousel.core.formulas.FormulaRegistry`, - * :class:`~carousel.core.outputs.OutputRegistry`, - * :class:`~carousel.core.calculation.CalcRegistry` - """ - # check if data loaded - data_objs = self.check_data(model.data) - if not self.is_data_loaded: - raise MissingDataError([ds for ds in data_objs if ds is None]) - # get layer registries - data_reg = model.registries['data'] - formula_reg = model.registries['formulas'] - out_reg = model.registries['outputs'] - calc_reg = model.registries['calculations'] - # initialize - if not self.isinitialized: - self.initialize(calc_reg) - # default progress hook - if not progress_hook: - progress_hook = functools.partial( - sim_progress_hook, display_header=True - ) - # start, resume or restart - if self.ispaused: - # if paused, then resume, do not resize outputs again. - self._ispaused = False # change pause state - progress_hook('resume simulation') - elif self.iscomplete: - # if complete, then restart, do not resize outputs again. - self._iscomplete = False # change pause state - progress_hook('restart simulation') - self.idx_iter = self.index_iterator() - else: - # resize outputs - # assumes that self.write_frequency is immutable - # TODO: allow self.write_frequency to be changed - # only resize outputs first time simulation is started - # repeat output rows to self.write_frequency - # put initial conditions of outputs last so it's copied when - # idx == 0 - progress_hook('resize outputs') # display progress - for k in out_reg: - if out_reg.isconstant[k]: - continue - # repeat rows (axis=0) - out_reg[k] = out_reg[k].repeat(self.write_frequency, 0) - _initial_value = out_reg.initial_value[k] - if not _initial_value: - continue - if isinstance(_initial_value, basestring): - # initial value is from data registry - # assign in a scalar to a vector fills in the vector, yes! 
- out_reg[k][-1] = data_reg[_initial_value] - else: - out_reg[k][-1] = _initial_value * out_reg[k].units - progress_hook('start simulation') - # check and/or make Carousel_Simulations and simulation ID folders - mkdir_p(self.path) - sim_id_path = os.path.join(self.path, self.ID) - mkdir_p(sim_id_path) - # header & units for save files - data_fields = self.write_fields.get('data', []) # any data fields - out_fields = self.write_fields.get('outputs', []) # any outputs fields - save_header = tuple(data_fields + out_fields) # concatenate fields - # get units as strings from data & outputs - data_units = [str(data_reg[f].dimensionality) for f in data_fields] - out_units = [str(out_reg[f].dimensionality) for f in out_fields] - save_units = tuple(data_units + out_units) # concatenate units - # string format for header & units - save_str = ('%s' + ',%s' * (len(save_header) - 1)) + '\n' # format - save_header = (save_str * 2) % (save_header + save_units) # header - save_header = save_header[:-1] # remove trailing new line - # =================== - # Static calculations - # =================== - progress_hook('static calcs') - for calc in self.calc_order: - if not calc_reg.is_dynamic[calc]: - calc_reg.calculator[calc].calculate( - calc_reg[calc], formula_reg, data_reg, out_reg - ) - # ==================== - # Dynamic calculations - # ==================== - progress_hook('dynamic calcs') - # TODO: assumes that interval size and indices are same, but should - # interpolate for any size interval or indices - for idx_tot in self.idx_iter: - self.interval_idx = idx_tot # update simulation interval counter - idx = idx_tot % self.write_frequency - # update properties - for k, v in out_reg.isproperty.iteritems(): - # set properties from previous interval at night - if v: - out_reg[k][idx] = out_reg[k][idx - 1] - # night if any threshold exceeded - if self.thresholds: - night = not all(limits[0] < data_reg[data][idx] < limits[1] for - data, limits in self.thresholds.iteritems()) - else: - night = None - # daytime or always calculated outputs - for calc in self.calc_order: - # Determine if calculation is scheduled for this timestep - # TODO: add ``start_at`` parameter combined with ``frequency`` - freq = calc_reg.frequency[calc] - if not freq.dimensionality: - is_scheduled = (idx_tot % freq) == 0 - else: - # Frequency with units of time - is_scheduled = ((idx_tot * self.interval) % freq) == 0 - is_scheduled = ( - is_scheduled and (not night or calc_reg.always_calc[calc]) - ) - if calc_reg.is_dynamic[calc] and is_scheduled: - calc_reg.calculator[calc].calculate( - calc_reg[calc], formula_reg, data_reg, out_reg, - timestep=self.interval, idx=idx - ) - # display progress - if not (idx % self.display_frequency): - progress_hook(self.format_progress(idx, data_reg, out_reg)) - # disp_head = False - # create an index for the save file, 0 if not saving - if not ((idx_tot + 1) % self.write_frequency): - savenum = (idx_tot + 1) / self.write_frequency - elif idx_tot == self.number_intervals - 1: - # save file index should be integer! 
- savenum = int(np.ceil((idx_tot + 1) / - float(self.write_frequency))) - else: - savenum = 0 # not saving this iteration - # save file to disk - if savenum: - savename = self.ID + '_' + str(savenum) + '.csv' # filename - savepath = os.path.join(sim_id_path, savename) # path - # create array of all data & outputs to save - save_array = self.format_write(data_reg, out_reg, idx + 1) - # save as csv using default format & turn comments off - np.savetxt(savepath, save_array, delimiter=',', - header=save_header, comments='') - try: - cmd = self.cmd_queue.get_nowait() - except Queue.Empty: - continue - if cmd == 'pause': - self._ispaused = True - return - self._iscomplete = True # change completion status - - def format_progress(self, idx, data_reg, out_reg): - data_fields = self.display_fields.get('data', []) # data fields - data_args = [(f, data_reg[f][idx]) for f in data_fields] - out_fields = self.display_fields.get('outputs', []) # outputs fields - out_args = [(f, out_reg[f][idx]) for f in out_fields] - return [idx] + data_args + out_args - - def format_write(self, data_reg, out_reg, idx=None): - data_fields = self.write_fields.get('data', []) # any data fields - data_args = [data_reg[f][:idx].reshape((-1, 1)) for f in data_fields] - out_fields = self.write_fields.get('outputs', []) # any outputs fields - out_args = [out_reg[f][:idx] for f in out_fields] - return np.concatenate(data_args + out_args, axis=1) - - def pause(self, progress_hook=None): - """ - Pause the simulation. How is this different from stopping it? Maintain - info sufficient to restart simulation. Sets ``is_paused`` to True. - Will this state allow analysis? changing parameters? What can you do - with a paused simulation? - Should be capable of saving paused simulation for loading/resuming - later, that is the main usage. EG: someone else need computer, or power - goes out, so on battery backup quickly pause simulation, and save. - Is save automatic? Should there be a parameter for auto save changed? - """ - # default progress hook - if progress_hook is None: - progress_hook = sim_progress_hook - progress_hook('simulation paused') - self.cmd_queue.put('pause') - self._ispaused = True - - def load(self, model, progress_hook=None, *args, **kwargs): - # default progress hook - if progress_hook is None: - progress_hook = sim_progress_hook - data = kwargs.get('data', {}) - if not data and args: - data = args[0] - for k, v in data.iteritems(): - progress_hook('loading simulation for %s' % k) - model.data.open(k, **v) - self.check_data(model.data) - - def run(self, model, progress_hook=None, *args, **kwargs): - # default progress hook - if progress_hook is None: - progress_hook = sim_progress_hook - progress_hook('running simulation') - self.load(model, progress_hook, *args, **kwargs) - self.start(model, progress_hook) diff --git a/carousel/docs/conf.py.bak b/carousel/docs/conf.py.bak deleted file mode 100644 index 8dbdc4e..0000000 --- a/carousel/docs/conf.py.bak +++ /dev/null @@ -1,314 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Carousel documentation build configuration file, created by -# sphinx-quickstart on Wed Feb 10 14:16:34 2016. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. 
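Stepping back to the simulation loop deleted above: three pieces of bookkeeping in start() are easy to miss in a diff this size. Outputs are resized by repeating rows along axis 0, with a scalar initial value broadcast into the last row; dynamic calcs are gated by a frequency that is either a pure count or a time quantity; and save files are numbered so the final partial chunk is still written. A standalone sketch of each, assuming pint supplies the unit-aware quantities (Carousel's UREG is a pint registry):

    import numpy as np
    import pint

    ureg = pint.UnitRegistry()

    # 1) resize: repeat rows along axis 0, then let a scalar initial value
    #    broadcast into the last row (assigning a scalar fills the row)
    out = np.zeros((1, 4)).repeat(3, 0)   # write_frequency = 3 -> shape (3, 4)
    out[-1] = 25.0                        # scalar fills the whole last row
    assert out.shape == (3, 4) and (out[-1] == 25.0).all()

    # 2) scheduling: dimensionless frequencies count indices; time-valued
    #    frequencies count elapsed simulated time (idx_tot * interval)
    interval = 1 * ureg.hour

    def is_scheduled(idx_tot, freq):
        if not freq.dimensionality:                   # pure count
            return idx_tot % freq.m == 0
        return ((idx_tot * interval) % freq).m == 0   # units of time

    assert is_scheduled(4, 2 * ureg.dimensionless)
    assert not is_scheduled(3, 2 * ureg.hour)

    # 3) save numbering: 0 means "no save this iteration"; files count up
    #    1, 2, ... every write_frequency intervals, and the last partial
    #    chunk is rounded up so the tail of the run is not lost
    def save_number(idx_tot, write_frequency, number_intervals):
        if not (idx_tot + 1) % write_frequency:
            return (idx_tot + 1) // write_frequency
        if idx_tot == number_intervals - 1:
            return int(np.ceil((idx_tot + 1) / float(write_frequency)))
        return 0

    assert [save_number(i, 8, 20) for i in (7, 15, 19)] == [1, 2, 3]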
- -import sys -import os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath(os.path.join('..', '..'))) - -from carousel import __version__, __release__, __author__, __name__ - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', - 'sphinx.ext.coverage', - 'sphinx.ext.mathjax', - 'sphinx.ext.viewcode', -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'Carousel' -copyright = u'2016, SunPower' -author = __author__ - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = __version__ -# The full version, including alpha/beta/rc tags. -release = __release__ - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = False - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'alabaster' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. 
For a list of options available for each theme, see the -# documentation. -html_theme_options = { - 'logo': 'sp_2014_logo_black_orange_rgb.png', - 'logo_name': True, - 'description': 'Model Simulation Framework', - 'github_user': 'SunPower', - 'github_repo': 'Carousel', - 'github_banner': True, - 'travis_button': True, - 'show_related': True -} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = 'sp_2014_logo_black_orange_rgb.png' - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -html_favicon = 'favicon.ico' - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -#html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -html_sidebars = { - '**': [ - 'about.html', - 'navigation.html', - 'searchbox.html', - 'relations.html' - ] -} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. 
-#html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = '%sdoc' % __name__ - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', - -# Latex figure (float) alignment -#'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, '%s.tex' % __name__, u'%s Documentation' % __name__, - __author__, 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, __name__.lower(), u'%s Documentation' % __name__, - [author], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, __name__, u'%s Documentation' % __name__, - author, __name__, 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - 'numpy': ('https://docs.scipy.org/doc/numpy/', None), - 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None), - 'python': ('https://docs.python.org/', None)} diff --git a/carousel/tests/test_calcs.py.bak b/carousel/tests/test_calcs.py.bak deleted file mode 100644 index f792625..0000000 --- a/carousel/tests/test_calcs.py.bak +++ /dev/null @@ -1,124 +0,0 @@ -""" -test calculations -""" - -from nose.tools import ok_, eq_ -from carousel.core.calculations import Calc, CalcParameter -from carousel.core.calculators import Calculator -from carousel.tests import PROJ_PATH, sandia_performance_model -import os -import uncertainties -from pvlib.solarposition import get_solarposition as solpos -import logging -import numpy as np - -LOGGER = logging.getLogger(__name__) - - -def test_calc_metaclass(): - """ - Test calculation class is created with params file using metaclass - """ - - class CalcTest1(Calc): - class Meta: - calcs_file = 'utils.json' - calcs_path = os.path.join(PROJ_PATH, 'calculations') - - calc_test1 = CalcTest1() - ok_(isinstance(calc_test1, Calc)) - eq_(calc_test1.param_file, - os.path.join(PROJ_PATH, 'calculations', 'utils.json')) - - class CalcTest2(Calc): - energy = CalcParameter( - is_dynamic=False, - dependencies=["ac_power", "daterange"], - formula="f_energy", - args={"outputs": {"ac_power": "Pac", "times": "timestamps"}}, - returns=["hourly_energy", "hourly_timeseries"] - ) - monthly_rollup = CalcParameter( - is_dynamic=False, - dependencies=["energy"], - formula="f_rollup", - args={ - "data": {"freq": "MONTHLY"}, - "outputs": {"items": "hourly_energy", - "times": "hourly_timeseries"} - }, - returns=["monthly_energy"] - ) - yearly_rollup = CalcParameter( - is_dynamic=False, - dependencies=["energy"], - formula="f_rollup", - args={"data": {"freq": "YEARLY"}, - "outputs": {"items": "hourly_energy", - "times": "hourly_timeseries"}}, - returns=["annual_energy"] - ) - - calc_test2 = CalcTest2() - ok_(isinstance(calc_test2, Calc)) - for k, v in calc_test1.parameters.iteritems(): - eq_(calc_test2.parameters[k], v) - - -def test_static_calc_unc(): - """ - Test uncertainty propagation in static calculations using Uncertainties. 
- """ - - # FIXME: this shouldn't have to run a model to test the uncertainty - test_model_file = os.path.join(PROJ_PATH, 'models', - 'sandia_performance_model-Tuscon.json') - test_model = sandia_performance_model.SAPM(test_model_file) # create model - test_model.command('start') # start simulation - # get parameters from model - dt = test_model.outputs.reg['timestamps'] # timestamps - latitude = test_model.data.reg['latitude'].m # latitude [degrees] - longitude = test_model.data.reg['longitude'].m # longitude [degrees] - zenith = test_model.outputs.reg['solar_zenith'].m # zenith [degrees] - s_ze_ze = test_model.outputs.reg.variance['solar_zenith']['solar_zenith'] - azimuth = test_model.outputs.reg['solar_azimuth'].m # azimuth [degrees] - s_az_az = test_model.outputs.reg.variance['solar_azimuth']['solar_azimuth'] - # get uncertainties percentages in base units - lat_unc = test_model.data.reg.uncertainty['latitude']['latitude'] - lat_unc = lat_unc.to_base_units().m - lon_unc = test_model.data.reg.uncertainty['longitude']['longitude'] - lon_unc = lon_unc.to_base_units().m - # create ufloat Uncertainties from parameters - lat_unc = uncertainties.ufloat(latitude, np.abs(latitude * lat_unc)) - lon_unc = uncertainties.ufloat(longitude, np.abs(longitude * lon_unc)) - test_unc = [] # empty list to collect return values - for n in xrange(96): - # Uncertainties wrapped functions must return only scalar float - f_ze_unc = uncertainties.wrap( - lambda lat, lon: solpos(dt[n], lat, lon)['apparent_zenith'].item() - ) - f_az_unc = uncertainties.wrap( - lambda lat, lon: solpos(dt[n], lat, lon)['azimuth'].item() - ) - ze_unc, az_unc = f_ze_unc(lat_unc, lon_unc), f_az_unc(lat_unc, lon_unc) - LOGGER.debug( - '%s: ze = %g +/- %g%%, az = %g +/- %g%%', dt[n].isoformat(), - zenith[n], np.sqrt(s_ze_ze[n]) * 100, - azimuth[n], np.sqrt(s_az_az[n]) * 100 - ) - LOGGER.debug( - 'Uncertainties test %2d: ze = %g +/- %g%%, az = %g +/- %g%%', n, - ze_unc.n, ze_unc.s / ze_unc.n * 100, - az_unc.n, az_unc.s / az_unc.n * 100 - ) - assert np.isclose(zenith[n], ze_unc.n) - assert np.isclose(np.sqrt(s_ze_ze[n]), ze_unc.s / ze_unc.n) - assert np.isclose(azimuth[n], az_unc.n) - assert np.isclose(np.sqrt(s_az_az[n]), az_unc.s / az_unc.n) - test_unc.append({'ze': ze_unc, 'az': az_unc}) - return test_model, test_unc - - -if __name__ == '__main__': - tm, tu = test_static_calc_unc() - test_calc_metaclass() diff --git a/carousel/tests/test_data.py.bak b/carousel/tests/test_data.py.bak deleted file mode 100644 index a1dba40..0000000 --- a/carousel/tests/test_data.py.bak +++ /dev/null @@ -1,181 +0,0 @@ -""" -Test data sources -""" - -from nose.tools import ok_, eq_ -from carousel.tests import logging -from carousel.core import UREG -from carousel.core.data_sources import DataSource, DataParameter -from carousel.core.data_readers import XLRDReader -from carousel.tests import PROJ_PATH, TESTS_DIR -import os - -LOGGER = logging.getLogger(__name__) -LOGGER.setLevel(logging.DEBUG) -TUSCON = os.path.join(PROJ_PATH, 'data', 'Tuscon.json') -XLRDREADER_TESTDATA = os.path.join(TESTS_DIR, 'xlrdreader_testdata.xlsx') - - -def test_datasource_metaclass(): - """ - Test data source meta class. - """ - - class DataSourceTest1(DataSource): - """ - Test data source with parameters in file. 
- """ - class Meta: - data_file = 'pvpower.json' - data_path = os.path.join(PROJ_PATH, 'data') - - def __prepare_data__(self): - pass - - data_test1 = DataSourceTest1(TUSCON) - ok_(isinstance(data_test1, DataSource)) - eq_(data_test1.param_file, os.path.join(PROJ_PATH, 'data', 'pvpower.json')) - - class DataSourceTest2(DataSource): - """ - Test data source with parameters in code. - """ - latitude = DataParameter(**{ - "description": "latitude", - "units": "degrees", - "isconstant": True, - "dtype": "float", - "uncertainty": 1.0 - }) - longitude = DataParameter(**{ - "description": "longitude", - "units": "degrees", - "isconstant": True, - "dtype": "float", - "uncertainty": 1.0 - }) - elevation = DataParameter(**{ - "description": "altitude of site above sea level", - "units": "meters", - "isconstant": True, - "dtype": "float", - "uncertainty": 1.0 - }) - timestamp_start = DataParameter(**{ - "description": "initial timestamp", - "isconstant": True, - "dtype": "datetime" - }) - timestamp_count = DataParameter(**{ - "description": "number of timesteps", - "isconstant": True, - "dtype": "int" - }) - module = DataParameter(**{ - "description": "PV module", - "isconstant": True, - "dtype": "str" - }) - inverter = DataParameter(**{ - "description": "PV inverter", - "isconstant": True, - "dtype": "str" - }) - module_database = DataParameter(**{ - "description": "module databases", - "isconstant": True, - "dtype": "str" - }) - inverter_database = DataParameter(**{ - "description": "inverter database", - "isconstant": True, - "dtype": "str" - }) - Tamb = DataParameter(**{ - "description": "average yearly ambient air temperature", - "units": "degC", - "isconstant": True, - "dtype": "float", - "uncertainty": 1.0 - }) - Uwind = DataParameter(**{ - "description": "average yearly wind speed", - "units": "m/s", - "isconstant": True, - "dtype": "float", - "uncertainty": 1.0 - }) - surface_azimuth = DataParameter(**{ - "description": "site rotation", - "units": "degrees", - "isconstant": True, - "dtype": "float", - "uncertainty": 1.0 - }) - timezone = DataParameter(**{ - "description": "timezone", - "isconstant": True, - "dtype": "str" - }) - - def __prepare_data__(self): - pass - - data_test2 = DataSourceTest2(TUSCON) - ok_(isinstance(data_test2, DataSource)) - for k, val in data_test1.parameters.iteritems(): - eq_(data_test2.parameters[k], val) - - class DataSourceTest4(DataSource): - """ - Test data source with parameters in file. - """ - latitude = DataParameter(**{ - "description": "latitude", - "units": "radians", - "isconstant": True, - "dtype": "float", - "uncertainty": 1.0 - }) - - class Meta: - data_file = 'pvpower.json' - data_path = os.path.join(PROJ_PATH, 'data') - - def __prepare_data__(self): - pass - - - data_test4 = DataSourceTest4(TUSCON) - ok_(isinstance(data_test4, DataSource)) - eq_(data_test4['latitude'].u, UREG.radians) - eq_(data_test4.param_file, os.path.join(PROJ_PATH, 'data', 'pvpower.json')) - - -def test_xlrdreader_datasource(): - """ - Test data source with xlrd reader. - """ - - class DataSourceTest3(DataSource): - """ - Test data source with xlrd reader and params in file. 
- """ - class Meta: - data_reader = XLRDReader - data_file = 'xlrdreader_param.json' - data_path = TESTS_DIR - - def __prepare_data__(self): - pass - - data_test3 = DataSourceTest3(XLRDREADER_TESTDATA) - ok_(isinstance(data_test3, DataSource)) - eq_(data_test3._meta.data_reader, XLRDReader) - os.remove(os.path.join(TESTS_DIR, 'xlrdreader_testdata.xlsx.json')) - LOGGER.debug('xlrdreader_testdata.xlsx.json has been cleaned') - - -if __name__ == '__main__': - test_datasource_metaclass() - test_xlrdreader_datasource() diff --git a/carousel/tests/test_formulas.py.bak b/carousel/tests/test_formulas.py.bak deleted file mode 100644 index cfba426..0000000 --- a/carousel/tests/test_formulas.py.bak +++ /dev/null @@ -1,95 +0,0 @@ -""" -test formulas -""" - -from nose.tools import ok_, eq_ -import numpy as np -from carousel.core import UREG -from carousel.core.formulas import ( - Formula, NumericalExpressionImporter, FormulaParameter -) -from carousel.tests import PROJ_PATH -import os - - -def test_formulas_metaclass(): - """ - Test Formulas - """ - - class FormulaTest1(Formula): - class Meta: - formulas_file = 'utils.json' - formulas_path = os.path.join(PROJ_PATH, 'formulas') - - formulas_test1 = FormulaTest1() - ok_(isinstance(formulas_test1, Formula)) - eq_(formulas_test1.param_file, - os.path.join(PROJ_PATH, 'formulas', 'utils.json')) - - class FormulaTest2(Formula): - f_daterange = FormulaParameter() - f_energy = FormulaParameter( - args=["ac_power", "times"], - units=[["watt_hour", None], ["W", None]] - ) - f_rollup = FormulaParameter( - args=["items", "times", "freq"], - units=["=A", ["=A", None, None]] - ) - - class Meta: - module = ".utils" - package = "formulas" - - formulas_test2 = FormulaTest2() - ok_(isinstance(formulas_test2, Formula)) - for k, v in formulas_test2.parameters.iteritems(): - eq_(formulas_test1.parameters[k], v) - - -def test_numexpr_formula(): - """ - Test formulas imported using ``numexpr`` - """ - - class NumexprFormula(Formula): - f_hypotenuse = FormulaParameter( - expression='sqrt(a * a + b * b)', - args=['a', 'b'], - units=[('=A', ), ('=A', '=A')], - isconstant=[] - ) - - class Meta: - formula_importer = NumericalExpressionImporter - - numexpr_formula = NumexprFormula() - ok_(isinstance(numexpr_formula, Formula)) - unc = 0.1 # uncertainty - var = unc**2 # variance - cov = np.array([[[var, 0], [0, var]], [[var, 0], [0, var]]]) - a = [3.0, 12.0] * UREG.cm - b = [4.0, 5.0] * UREG.cm - f_hypotenuse = numexpr_formula.formulas['f_hypotenuse'] - c, c_unc, c_jac = f_hypotenuse(a, b, __covariance__=cov) - assert np.allclose(c.m, np.array([5.0, 13.0])) - eq_(c.u, UREG.centimeter) - # import sympy - # x, y = sympy.symbols('x, y') - # z = sympy.sqrt(x * x + y * y) - # fx, fy = z.diff(x), z.diff(y) - # fx, fy = x/sqrt(x**2 + y**2), y/sqrt(x**2 + y**2) - # fx, fy = x/z, y/z - # dz = sqrt(fx**2 * dx**2 + fy**2 * dy**2) - da, db = a.m * unc, b.m * unc - fa, fb = a.m / c.m, b.m / c.m - dc = (fa ** 2 * da ** 2) + (fb ** 2 * db ** 2) - assert np.allclose(c_unc.squeeze(), dc) - fc = np.array([fa, fb]).T - assert np.allclose(c_jac.squeeze(), fc) - return numexpr_formula - - -if __name__ == '__main__': - f = test_numexpr_formula() diff --git a/carousel/tests/test_outputs.py.bak b/carousel/tests/test_outputs.py.bak deleted file mode 100644 index f7792bd..0000000 --- a/carousel/tests/test_outputs.py.bak +++ /dev/null @@ -1,40 +0,0 @@ -""" -test outputs -""" - -from nose.tools import ok_, eq_ -from carousel.core.outputs import Output -from carousel.tests import PROJ_PATH -import os - - -def 
test_outputs_metaclass(): - """ - Test Output Sources - """ - - class OutputTest1(Output): - class Meta: - outputs_file = 'pvpower.json' - outputs_path = os.path.join(PROJ_PATH, 'outputs') - - out_src_test1 = OutputTest1() - ok_(isinstance(out_src_test1, Output)) - eq_(out_src_test1.param_file, - os.path.join(PROJ_PATH, 'outputs', 'pvpower.json')) - - class OutputTest2(Output): - timestamps = {"isconstant": True, "size": 8761} - hourly_energy = { - "isconstant": True, - "timeseries": "hourly_timeseries", "units": "Wh", - "size": 8760 - } - hourly_timeseries = {"isconstant": True, "units": "Wh", "size": 8760} - monthly_energy = {"isconstant": True, "units": "Wh", "size": 12} - annual_energy = {"isconstant": True, "units": "Wh"} - - out_src_test2 = OutputTest2() - ok_(isinstance(out_src_test2, Output)) - for k, v in out_src_test2.parameters.iteritems(): - eq_(out_src_test1.parameters[k], v) diff --git a/carousel/tests/test_sim.py.bak b/carousel/tests/test_sim.py.bak deleted file mode 100644 index 76ca7b2..0000000 --- a/carousel/tests/test_sim.py.bak +++ /dev/null @@ -1,141 +0,0 @@ -""" -Simulation tests. -""" - -from carousel.core import logging, UREG -from carousel.core.models import Model, ModelParameter -from carousel.core.data_sources import DataParameter, DataSource -from carousel.core.formulas import FormulaParameter, Formula -from carousel.core.simulations import SimParameter, Simulation -from carousel.core.outputs import OutputParameter, Output -from carousel.core.calculations import Calc, CalcParameter -from carousel.contrib.readers import ArgumentReader -from carousel.tests import PROJ_PATH -import numpy as np -import os -import sympy - -LOGGER = logging.getLogger(__name__) - - -def test_make_sim_metaclass(): - """ - Test setting the simulation parameter file as class attributes versus - specifying the simulation parameter file in the model parameter file. 
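The covariance check in test_formulas above and the Pythagoras model assembled below both verify the same first-order (delta-method) propagation for c = sqrt(a*a + b*b); test_calcs makes the equivalent comparison against the uncertainties package. The arithmetic written out by hand, using test_sim's values (a minimal sketch, not Carousel API):

    import numpy as np

    # Delta method for c = sqrt(a*a + b*b): the partials are
    # fa = dc/da = a/c and fb = dc/db = b/c, so with uncorrelated a, b
    # the propagated variance is dc**2 = fa**2*da**2 + fb**2*db**2.
    a, b = 3.0, 4.0
    da = db = 0.1                  # absolute uncertainties, as in test_sim
    c = np.sqrt(a * a + b * b)     # 5.0
    fa, fb = a / c, b / c          # Jacobian row [0.6, 0.8]
    dc = np.sqrt(fa ** 2 * da ** 2 + fb ** 2 * db ** 2)
    print(c, dc)                   # 5.0 0.1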
- - :return: simulation - """ - - class SimTest1(Simulation): - class Meta: - sim_file = 'Tuscon.json' - sim_path = os.path.join(PROJ_PATH, 'simulations', 'Standalone') - - sim_test1 = SimTest1() - return sim_test1 - - -class PythagorasData(DataSource): - a = DataParameter(**{'units': 'cm', 'argpos': 0}) - b = DataParameter(**{'units': 'cm', 'argpos': 2}) - a_unc = DataParameter(**{'units': 'cm', 'argpos': 1}) - b_unc = DataParameter(**{'units': 'cm', 'argpos': 3}) - - class Meta: - data_cache_enabled = False - data_reader = ArgumentReader - - def __prepare_data__(self): - keys = self.parameters.keys() - for k in keys: - if k.endswith('_unc'): - unc = self.data.pop(k) - self.data_source.pop(k) - kunc = k[:-4] - v = self.data[kunc] - if not unc.dimensionless: - unc = (unc / v) - # raises dimensionality error if not dimensionless - self.uncertainty[kunc] = {kunc: unc.to(UREG.percent)} - else: - self.isconstant[k] = True - - -class PythagorasOutput(Output): - c = OutputParameter(**{'units': 'cm', 'isconstant': True}) - - -def f_hypotenuse(a, b): - a, b = np.atleast_1d(a), np.atleast_1d(b) - return np.sqrt(a * a + b * b).reshape(1, -1) - - -class PythagorasFormula(Formula): - f_hypotenuse = FormulaParameter( - args=['a', 'b'], - units=[('=A', ), ('=A', '=A')], - isconstant=[] - ) - - class Meta: - module = 'carousel.tests.test_sim' - - -class PythagorasCalc(Calc): - pythagorean_thm = CalcParameter( - is_dynamic=False, - formula='f_hypotenuse', - args={'data': {'a': 'a', 'b': 'b'}}, - returns=['c'] - ) - - -class PythagorasSim(Simulation): - settings = SimParameter( - ID='Pythagorean Theorem', - commands=['start', 'load', 'run', 'pause'], - path='~/Carousel_Tests', - thresholds=None, - interval=[1, 'hour'], - sim_length=[0, 'hour'], - write_frequency=1, - write_fields={'data': ['a', 'b'], 'outputs': ['c']}, - display_frequency=1, - display_fields={'data': ['a', 'b'], 'outputs': ['c']}, - ) - - -class PythagorasModel(Model): - data = ModelParameter(sources=[PythagorasData]) - outputs = ModelParameter(sources=[PythagorasOutput]) - formulas = ModelParameter(sources=[PythagorasFormula]) - calculations = ModelParameter(sources=[PythagorasCalc]) - simulations = ModelParameter(sources=[PythagorasSim]) - - class Meta: - modelpath = os.path.dirname(__file__) - - -def test_call_sim_with_args(): - a, a_unc, b, b_unc = 3.0, 0.1, 4.0, 0.1 - c = f_hypotenuse(a, b) - m1 = PythagorasModel() - data = {'PythagorasData': {'a': a, 'b': b, 'a_unc': a_unc, 'b_unc': b_unc}} - m1.command('run', data=data) - assert m1.registries['outputs']['c'].m == c - assert m1.registries['outputs']['c'].u == UREG.cm - x, y = sympy.symbols('x, y') - z = sympy.sqrt(x * x + y * y) - fx = sympy.lambdify((x, y), z.diff(x)) - fy = sympy.lambdify((x, y), z.diff(y)) - dz = np.sqrt(fx(a, b) ** 2 * a_unc ** 2 + fy(a, b) ** 2 * b_unc ** 2) - c_unc = c * np.sqrt(m1.registries['outputs'].variance['c']['c']) - LOGGER.debug('uncertainty in c is %g', c_unc) - assert np.isclose(dz, c_unc.item()) - c_unc = c * m1.registries['outputs'].uncertainty['c']['c'].to('fraction') - assert np.isclose(dz, c_unc.m.item()) - return m1 - - -if __name__ == '__main__': - m = test_call_sim_with_args() diff --git a/examples/PVPower/formulas/utils.py.bak b/examples/PVPower/formulas/utils.py.bak deleted file mode 100644 index ce7088d..0000000 --- a/examples/PVPower/formulas/utils.py.bak +++ /dev/null @@ -1,100 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -This module contains formulas for calculating PV power. 
-""" - -import numpy as np -from scipy import constants as sc_const -import itertools -from dateutil import rrule -import pytz - - -def f_daterange(freq, tz='UTC', *args, **kwargs): - """ - Use ``dateutil.rrule`` to create a range of dates. The frequency must be a - string in the following list: YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, - MINUTELY or SECONDLY. - - See `dateutil rrule`_ documentation for more detail. - - .. _dateutil rrule: https://dateutil.readthedocs.org/en/latest/rrule.html - - :param freq: One of the ``dateutil.rrule`` frequencies - :type freq: str - :param tz: One of the ``pytz`` timezones, defaults to UTC - :type tz: str - :param args: start date , interval between each frequency , - max number of recurrences , end date - :param kwargs: ``dtstart``, ``interval``, ``count``, ``until`` - :return: range of dates - :rtype: list - """ - tz = pytz.timezone(tz) - freq = getattr(rrule, freq.upper()) # get frequency enumeration from rrule - return [tz.localize(dt) for dt in rrule.rrule(freq, *args, **kwargs)] - - -def f_energy(ac_power, times): - """ - Calculate the total energy accumulated from AC power at the end of each - timestep between the given times. - - :param ac_power: AC Power [W] - :param times: times - :type times: np.datetime64[s] - :return: energy [W*h] and energy times - """ - dt = np.diff(times) # calculate timesteps - # convert timedeltas to quantities - dt = dt.astype('timedelta64[s]').astype('float') / sc_const.hour - # energy accumulate during timestep - energy = dt * (ac_power[:-1] + ac_power[1:]) / 2 - return energy, times[1:] - - -def groupby_freq(items, times, freq, wkst='SU'): - """ - Group timeseries by frequency. The frequency must be a string in the - following list: YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY or - SECONDLY. The optional weekstart must be a string in the following list: - MO, TU, WE, TH, FR, SA and SU. 
- - :param items: items in timeseries - :param times: times corresponding to items - :param freq: One of the ``dateutil.rrule`` frequency constants - :type freq: str - :param wkst: One of the ``dateutil.rrule`` weekday constants - :type wkst: str - :return: generator - """ - timeseries = zip(times, items) # timeseries map of items - # create a key lambda to group timeseries by - if freq.upper() == 'DAILY': - def key(ts_): return ts_[0].day - elif freq.upper() == 'WEEKLY': - weekday = getattr(rrule, wkst.upper()) # weekday start - # generator that searches times for weekday start - days = (day for day in times if day.weekday() == weekday.weekday) - day0 = days.next() # first weekday start of all times - - def key(ts_): return (ts_[0] - day0).days // 7 - else: - def key(ts_): return getattr(ts_[0], freq.lower()[:-2]) - for k, ts in itertools.groupby(timeseries, key): - yield k, ts - - -def f_rollup(items, times, freq): - """ - Use :func:`groupby_freq` to rollup items - - :param items: items in timeseries - :param times: times corresponding to items - :param freq: One of the ``dateutil.rrule`` frequency constants - :type freq: str - """ - rollup = [np.sum(item for __, item in ts) - for _, ts in groupby_freq(items, times, freq)] - return np.array(rollup) diff --git a/examples/PVPower/pvpower/sandia_perfmod_newstyle.py.bak b/examples/PVPower/pvpower/sandia_perfmod_newstyle.py.bak deleted file mode 100644 index 3d637ef..0000000 --- a/examples/PVPower/pvpower/sandia_perfmod_newstyle.py.bak +++ /dev/null @@ -1,509 +0,0 @@ -""" -New Style Carousel Sandia Performance Model -""" - -from carousel.core.data_sources import DataSourceBase, DataSource, DataParameter -from carousel.core.formulas import Formula, FormulaParameter -from carousel.core.calculations import Calc, CalcParameter -from carousel.core.calculators import Calculator -from carousel.core.outputs import Output, OutputParameter -from carousel.core.simulations import Simulation, SimParameter -from carousel.core.models import Model, ModelParameter -from carousel.core import UREG -from datetime import datetime -import pvlib -import os -from pvpower import PROJ_PATH - -SANDIA_MODULES = os.path.join(PROJ_PATH, 'Sandia Modules.csv') -CEC_MODULES = os.path.join(PROJ_PATH, 'CEC Modules.csv') -CEC_INVERTERS = os.path.join(PROJ_PATH, 'CEC Inverters.csv') - - -class PVPowerData(DataSource): - """ - Data sources for PV Power demo. - """ - latitude = DataParameter(units="degrees", uncertainty=1.0) - longitude = DataParameter(units="degrees", uncertainty=1.0) - elevation = DataParameter(units="meters", uncertainty=1.0) - timestamp_start = DataParameter() - timestamp_count = DataParameter() - module = DataParameter() - inverter = DataParameter() - module_database = DataParameter() - inverter_database = DataParameter() - Tamb = DataParameter(units="degC", uncertainty=1.0) - Uwind = DataParameter(units="m/s", uncertainty=1.0) - surface_azimuth = DataParameter(units="degrees", uncertainty=1.0) - timezone = DataParameter() - - def __prepare_data__(self): - parameters = getattr(self, DataSourceBase._param_attr) - # set frequencies - for k in ('HOURLY', 'MONTHLY', 'YEARLY'): - self.data[k] = k - self.isconstant[k] = True - # apply metadata - for k, v in parameters.iteritems(): - # TODO: this should be applied in data reader using _meta_names from - # data registry which should use a meta class and all parameter - # files should have same layout even xlrd and numpy readers, etc. 
- self.isconstant[k] = True # set all data "isconstant" True - # uncertainty is dictionary - if 'uncertainty' in v: - self.uncertainty[k] = {k: v['uncertainty'] * UREG.percent} - # convert initial timestamp to datetime - self.data['timestamp_start'] = datetime(*self.data['timestamp_start']) - # get module and inverter databases - self.data['module_database'] = pvlib.pvsystem.retrieve_sam( - self.data['module_database'], path=SANDIA_MODULES - ) - self.data['inverter_database'] = pvlib.pvsystem.retrieve_sam( - self.data['inverter_database'], path=CEC_INVERTERS - ) - # get module and inverter - self.data['module'] = self.data['module_database'][self.data['module']] - self.data['inverter'] = ( - self.data['inverter_database'][self.data['inverter']] - ) - - -class UtilityFormulas(Formula): - """ - Formulas for PV Power demo - """ - f_daterange = FormulaParameter() - f_energy = FormulaParameter( - args=["ac_power", "times"], - units=[["watt_hour", None], ["W", None]] - ) - f_rollup = FormulaParameter( - args=["items", "times", "freq"], - units=["=A", ["=A", None, None]] - ) - - class Meta: - module = ".utils" - package = "formulas" - - -class PerformanceFormulas(Formula): - """ - Formulas for performance calcs - """ - f_ac_power = FormulaParameter( - args=["inverter", "v_mp", "p_mp"], - units=["W", [None, "V", "W"]] - ) - f_dc_power = FormulaParameter( - args=["effective_irradiance", "cell_temp", "module"], - units=[["A", "A", "V", "V", "W"], ["suns", "degC", None]] - ) - f_effective_irradiance = FormulaParameter( - args=["poa_direct", "poa_diffuse", "am_abs", "aoi", "module"], - units=["suns", ["W/m**2", "W/m**2", "dimensionless", "deg", None]] - ) - f_cell_temp = FormulaParameter( - args=["poa_global", "wind_speed", "air_temp"], - units=[["degC", "degC"], ["W/m**2", "m/s", "degC"]] - ) - f_aoi = FormulaParameter( - args=["surface_tilt", "surface_azimuth", "solar_zenith", - "solar_azimuth"], - units=["deg", ["deg", "deg", "deg", "deg"]] - ) - - class Meta: - module = ".performance" - package = "formulas" - - -class IrradianceFormulas(Formula): - """ - Formulas for irradiance calcs - """ - f_linketurbidity = FormulaParameter( - args=["times", "latitude", "longitude"], - units=["dimensionless", [None, "deg", "deg"]], - isconstant=["times"] - ) - f_clearsky = FormulaParameter( - args=["solar_zenith", "am_abs", "tl", "dni_extra", "altitude"], - units=[ - ["W/m**2", "W/m**2", "W/m**2"], - ["deg", "dimensionless", "dimensionless", "W/m**2", "m"] - ], - isconstant=["dni_extra"] - ) - f_solpos = FormulaParameter( - args=["times", "latitude", "longitude"], - units=[["degree", "degree"], [None, "degree", "degree"]], - isconstant=["times"] - ) - f_dni_extra = FormulaParameter(args=["times"], units=["W/m**2", [None]]) - f_airmass = FormulaParameter( - args=["solar_zenith"], units=["dimensionless", ["deg"]], - isconstant=[] - ) - f_pressure = FormulaParameter( - args=["altitude"], units=["Pa", ["m"]], isconstant=[] - ) - f_am_abs = FormulaParameter( - args=["airmass", "pressure"], - units=["dimensionless", ["dimensionless", "Pa"]], - isconstant=[] - ) - f_total_irrad = FormulaParameter( - args=[ - "times", "surface_tilt", "surface_azimuth", "solar_zenith", - "solar_azimuth", "dni", "ghi", "dhi", "dni_extra", "am_abs" - ], - units=[ - ["W/m**2", "W/m**2", "W/m**2"], - [ - None, "deg", "deg", "deg", "deg", "W/m**2", "W/m**2", - "W/m**2", - "W/m**2", "dimensionless" - ] - ], - isconstant=["times", "dni_extra"] - ) - - class Meta: - module = ".irradiance" - package = "formulas" - - -class UtilityCalcs(Calc): - 
""" - Calculations for PV Power demo - """ - energy = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=["ac_power", "daterange"], - formula="f_energy", - args={"outputs": {"ac_power": "Pac", "times": "timestamps"}}, - returns=["hourly_energy", "hourly_timeseries"] - ) - monthly_rollup = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=["energy"], - formula="f_rollup", - args={ - "data": {"freq": "MONTHLY"}, - "outputs": {"items": "hourly_energy", - "times": "hourly_timeseries"} - }, - returns=["monthly_energy"] - ) - yearly_rollup = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=["energy"], - formula="f_rollup", - args={"data": {"freq": "YEARLY"}, - "outputs": {"items": "hourly_energy", - "times": "hourly_timeseries"}}, - returns=["annual_energy"] - ) - - -class PerformanceCalcs(Calc): - """ - Calculations for performance - """ - aoi = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=["solpos"], - formula="f_aoi", - args={"data": {"surface_tilt": "latitude", - "surface_azimuth": "surface_azimuth"}, - "outputs": {"solar_zenith": "solar_zenith", - "solar_azimuth": "solar_azimuth"}}, - returns=["aoi"] - ) - cell_temp = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=["total_irradiance"], - formula="f_cell_temp", - args={"data": {"wind_speed": "Uwind", "air_temp": "Tamb"}, - "outputs": {"poa_global": "poa_global"}}, - returns=["Tcell", "Tmod"] - ) - effective_irradiance = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=["total_irradiance", "aoi", "abs_airmass"], - formula="f_effective_irradiance", - args={"data": {"module": "module"}, - "outputs": {"poa_direct": "poa_direct", - "poa_diffuse": "poa_diffuse", "am_abs": "am_abs", - "aoi": "aoi"}}, - returns=["Ee"] - ) - dc_power = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=["effective_irradiance", "cell_temp"], - formula="f_dc_power", - args={"data": {"module": "module"}, - "outputs": {"effective_irradiance": "Ee", "cell_temp": "Tcell"}}, - returns=["Isc", "Imp", "Voc", "Vmp", "Pmp"] - ) - ac_power = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=["dc_power"], - formula="f_ac_power", - args={"data": {"inverter": "inverter"}, - "outputs": {"v_mp": "Vmp", "p_mp": "Pmp"}}, - returns=["Pac"] - ) - - -class IrradianceCalcs(Calc): - """ - Calculations for irradiance - """ - daterange = CalcParameter( - is_dynamic=False, - calculator=Calculator, - formula="f_daterange", - args={"data": {"freq": "HOURLY", "dtstart": "timestamp_start", - "count": "timestamp_count", "tz": "timezone"}}, - returns=["timestamps"] - ) - solpos = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=["daterange"], - formula="f_solpos", - args={"data": {"latitude": "latitude", "longitude": "longitude"}, - "outputs": {"times": "timestamps"}}, - returns=["solar_zenith", "solar_azimuth"] - ) - extraterrestrial = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=["daterange"], - formula="f_dni_extra", - args={"outputs": {"times": "timestamps"}}, - returns=["extraterrestrial"] - ) - airmass = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=["solpos"], - formula="f_airmass", - args={"outputs": {"solar_zenith": "solar_zenith"}}, - returns=["airmass"] - ) - pressure = CalcParameter( - is_dynamic=False, - calculator=Calculator, - formula="f_pressure", - args={"data": {"altitude": "elevation"}}, - 
returns=["pressure"] - ) - abs_airmass = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=["airmass", "pressure"], - formula="f_am_abs", - args={"outputs": {"airmass": "airmass", "pressure": "pressure"}}, - returns=["am_abs"] - ) - linke_turbidity = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=["daterange"], - formula="f_linketurbidity", - args={"data": {"latitude": "latitude", "longitude": "longitude"}, - "outputs": {"times": "timestamps"}}, - returns=["tl"] - ) - clearsky = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=[ - "solpos", "abs_airmass", "linke_turbidity", "extraterrestrial" - ], - formula="f_clearsky", - args={"data": {"altitude": "elevation"}, - "outputs": {"solar_zenith": "solar_zenith", "am_abs": "am_abs", - "tl": "tl", "dni_extra": "extraterrestrial"}}, - returns=["dni", "ghi", "dhi"] - ) - total_irradiance = CalcParameter( - is_dynamic=False, - calculator=Calculator, - dependencies=[ - "daterange", "solpos", "clearsky", "extraterrestrial", - "abs_airmass" - ], - formula="f_total_irrad", - args={ - "data": { - "surface_tilt": "latitude", "surface_azimuth": "surface_azimuth" - }, - "outputs": { - "times": "timestamps", "solar_zenith": "solar_zenith", - "solar_azimuth": "solar_azimuth", "dni": "dni", - "ghi": "ghi", "dhi": "dhi", "dni_extra": "extraterrestrial", - "am_abs": "am_abs" - } - }, - returns=["poa_global", "poa_direct", "poa_diffuse"] - ) - - -class PVPowerOutputs(Output): - """ - Outputs for PV Power demo - """ - timestamps = OutputParameter(isconstant=True, size=8761) - hourly_energy = OutputParameter( - isconstant=True, timeseries="hourly_timeseries", units="Wh", size=8760 - ) - hourly_timeseries = OutputParameter(isconstant=True, units="Wh", size=8760) - monthly_energy = OutputParameter(isconstant=True, units="Wh", size=12) - annual_energy = OutputParameter(isconstant=True, units="Wh") - - -class PerformanceOutputs(Output): - """ - Performance outputs for PV Power demo - """ - Pac = OutputParameter( - isconstant=True, timeseries="timestamps", units="W", size=8761 - ) - Isc = OutputParameter( - isconstant=True, timeseries="timestamps", units="A", size=8761 - ) - Imp = OutputParameter( - isconstant=True, timeseries="timestamps", units="A", size=8761 - ) - Voc = OutputParameter( - isconstant=True, timeseries="timestamps", units="V", size=8761 - ) - Vmp = OutputParameter( - isconstant=True, timeseries="timestamps", units="V", size=8761 - ) - Pmp = OutputParameter( - isconstant=True, timeseries="timestamps", units="W", size=8761 - ) - Ee = OutputParameter( - isconstant=True, timeseries="timestamps", units="dimensionless", - size=8761 - ) - Tcell = OutputParameter( - isconstant=True, timeseries="timestamps", units="degC", size=8761 - ) - Tmod = OutputParameter( - isconstant=True, timeseries="timestamps", units="degC", size=8761 - ) - - -class IrradianceOutputs(Output): - """ - Irradiance outputs for PV Power demo - """ - tl = OutputParameter( - isconstant=True, timeseries="timestamps", units="dimensionless", - size=8761 - ) - poa_global = OutputParameter( - isconstant=True, timeseries="timestamps", units="W/m**2", size=8761 - ) - poa_direct = OutputParameter( - isconstant=True, timeseries="timestamps", units="W/m**2", size=8761 - ) - poa_diffuse = OutputParameter( - isconstant=True, timeseries="timestamps", units="W/m**2", size=8761 - ) - aoi = OutputParameter( - isconstant=True, timeseries="timestamps", units="deg", size=8761 - ) - solar_zenith = OutputParameter( - isconstant=True, 
timeseries="timestamps", units="deg", size=8761 - ) - solar_azimuth = OutputParameter( - isconstant=True, timeseries="timestamps", units="deg", size=8761 - ) - pressure = OutputParameter( - isconstant=True, timeseries="timestamps", units="Pa", size=1 - ) - airmass = OutputParameter( - isconstant=True, timeseries="timestamps", units="dimensionless", - size=8761 - ) - am_abs = OutputParameter( - isconstant=True, timeseries="timestamps", units="dimensionless", - size=8761 - ) - extraterrestrial = OutputParameter( - isconstant=True, timeseries="timestamps", units="W/m**2", size=8761 - ) - dni = OutputParameter( - isconstant=True, timeseries="timestamps", units="W/m**2", size=8761 - ) - dhi = OutputParameter( - isconstant=True, timeseries="timestamps", units="W/m**2", size=8761 - ) - ghi = OutputParameter( - isconstant=True, timeseries="timestamps", units="W/m**2", size=8761 - ) - - -class PVPowerSim(Simulation): - """ - PV Power Demo Simulations - """ - settings = SimParameter( - ID="Tuscon_SAPM", - path="~/Carousel_Simulations", - thresholds=None, - interval=[1, "hour"], - sim_length=[0, "hours"], - write_frequency=0, - write_fields={ - "data": ["latitude", "longitude", "Tamb", "Uwind"], - "outputs": ["monthly_energy", "annual_energy"] - }, - display_frequency=12, - display_fields={ - "data": ["latitude", "longitude", "Tamb", "Uwind"], - "outputs": ["monthly_energy", "annual_energy"] - }, - commands=['start', 'pause'] - ) - - -class NewSAPM(Model): - """ - PV Power Demo model - """ - data = ModelParameter( - layer='Data', sources=[(PVPowerData, {'filename': 'Tuscon.json'})] - ) - outputs = ModelParameter( - layer='Outputs', - sources=[PVPowerOutputs, PerformanceOutputs, IrradianceOutputs] - ) - formulas = ModelParameter( - layer='Formulas', - sources=[UtilityFormulas, PerformanceFormulas, IrradianceFormulas] - ) - calculations = ModelParameter( - layer='Calculations', - sources=[UtilityCalcs, PerformanceCalcs, IrradianceCalcs] - ) - simulations = ModelParameter(layer='Simulations', sources=[PVPowerSim]) - - class Meta: - modelpath = PROJ_PATH # folder containing project, not model diff --git a/examples/PVPower/pvpower/sandia_performance_model.py.bak b/examples/PVPower/pvpower/sandia_performance_model.py.bak deleted file mode 100644 index de5f2e7..0000000 --- a/examples/PVPower/pvpower/sandia_performance_model.py.bak +++ /dev/null @@ -1,167 +0,0 @@ -""" -Sandia Performance Model -""" - -from carousel.core.data_sources import DataSource -from carousel.core.formulas import Formula -from carousel.core.calculations import Calc -from carousel.core.calculators import Calculator -from carousel.core.outputs import Output -from carousel.core.simulations import Simulation -from carousel.core.models import Model -from carousel.core import UREG -from datetime import datetime -import pvlib -import os -from pvpower import PROJ_PATH - -CALC_PATH = os.path.join(PROJ_PATH, 'calculations') -FORMULA_PATH = os.path.join(PROJ_PATH, 'formulas') -DATA_PATH = os.path.join(PROJ_PATH, 'data') -SANDIA_MODULES = os.path.join(PROJ_PATH, 'Sandia Modules.csv') -CEC_MODULES = os.path.join(PROJ_PATH, 'CEC Modules.csv') -CEC_INVERTERS = os.path.join(PROJ_PATH, 'CEC Inverters.csv') - - -class PVPowerData(DataSource): - """ - Data sources for PV Power demo. 
- """ - class Meta: - data_file = 'pvpower.json' - data_path = DATA_PATH - - def __prepare_data__(self): - parameters = getattr(self, 'parameters') - # set frequencies - for k in ('HOURLY', 'MONTHLY', 'YEARLY'): - self.data[k] = k - self.isconstant[k] = True - # apply metadata - for k, v in parameters.iteritems(): - # TODO: this should be applied in data reader using _meta_names from - # data registry which should use a meta class and all parameter - # files should have same layout even xlrd and numpy readers, etc. - if 'isconstant' in v: - self.isconstant[k] = v['isconstant'] - # uncertainty is dictionary - if 'uncertainty' in v: - self.uncertainty[k] = {k: v['uncertainty'] * UREG.percent} - # convert initial timestamp to datetime - self.data['timestamp_start'] = datetime(*self.data['timestamp_start']) - # get module and inverter databases - self.data['module_database'] = pvlib.pvsystem.retrieve_sam( - self.data['module_database'], path=SANDIA_MODULES - ) - self.data['inverter_database'] = pvlib.pvsystem.retrieve_sam( - self.data['inverter_database'], path=CEC_INVERTERS - ) - # get module and inverter - self.data['module'] = self.data['module_database'][self.data['module']] - self.data['inverter'] = ( - self.data['inverter_database'][self.data['inverter']] - ) - - -class UtilityFormulas(Formula): - """ - Formulas for PV Power demo - """ - class Meta: - formulas_file = 'utils.json' - formulas_path = FORMULA_PATH - - -class PerformanceFormulas(Formula): - """ - Formulas for performance calcs - """ - class Meta: - formulas_file = 'performance.json' - formulas_path = FORMULA_PATH - - -class IrradianceFormulas(Formula): - """ - Formulas for irradiance calcs - """ - class Meta: - formulas_file = 'irradiance.json' - formulas_path = FORMULA_PATH - - -class UtilityCalcs(Calc): - """ - Calculations for PV Power demo - """ - class Meta: - calcs_file = 'utils.json' - calcs_path = CALC_PATH - dependencies = ['PerformanceCalcs'] - calculator = Calculator - is_dynamic = False - - -class PerformanceCalcs(Calc): - """ - Calculations for performance - """ - class Meta: - calcs_file = 'performance.json' - calcs_path = CALC_PATH - dependencies = ['IrradianceCalcs'] - calculator = Calculator - is_dynamic = False - - -class IrradianceCalcs(Calc): - """ - Calculations for irradiance - """ - class Meta: - calcs_file = 'irradiance.json' - calcs_path = CALC_PATH - calculator = Calculator - is_dynamic = False - - -class PVPowerOutputs(Output): - """ - Outputs for PV Power demo - """ - class Meta: - outputs_file = 'pvpower.json' - outputs_path = os.path.join(PROJ_PATH, 'outputs') - - -class PerformanceOutputs(Output): - """ - Performance outputs for PV Power demo - """ - class Meta: - outputs_file = 'performance.json' - outputs_path = os.path.join(PROJ_PATH, 'outputs') - - -class IrradianceOutputs(Output): - """ - Irradiance outputs for PV Power demo - """ - class Meta: - outputs_file = 'irradiance.json' - outputs_path = os.path.join(PROJ_PATH, 'outputs') - - -class Standalone(Simulation): - """ - PV Power Demo Simulations - """ - pass - - -class SAPM(Model): - """ - PV Power Demo model - """ - class Meta: - modelpath = PROJ_PATH # folder containing project, not model diff --git a/examples/PVPower/pvpower/tests/test_pvpower.py.bak b/examples/PVPower/pvpower/tests/test_pvpower.py.bak deleted file mode 100644 index 0c7e79e..0000000 --- a/examples/PVPower/pvpower/tests/test_pvpower.py.bak +++ /dev/null @@ -1,114 +0,0 @@ -""" -Tests for pvpower formulas -""" - -from datetime import datetime, timedelta -import numpy as 
np -import pytz -from carousel.core import UREG, logging, models -from pvpower.sandia_performance_model import ( - UtilityFormulas, IrradianceFormulas -) -from pvpower import sandia_performance_model, sandia_perfmod_newstyle -from pvpower.tests import MODEL_PATH -import os - -LOGGER = logging.getLogger(__name__) -LOGGER.setLevel(logging.DEBUG) -TZ = 'US/Pacific' -PST = pytz.timezone(TZ) -UTIL_FORMULAS = UtilityFormulas() -IRRAD_FORMULAS = IrradianceFormulas() -DTSTART = datetime(2007, 1, 1, 0, 0, 0) -MONTHLY_ENERGY = [186000.0, 168000.0, 186000.0, 180000.0, 186000.0, 180000.0, - 186000.0, 186000.0, 180000.0, 186000.0, 180000.0, 186000.0] -ZENITH = [ - 84.67227032399542, 75.69700469024768, 68.32442897476993, 63.22974106430276, - 61.01563669117582, 62.00067006350331, 66.0382089666321, 72.60444584135432, - 81.04253480488877 -] -AZIMUTH = [ - 124.64915808, 135.21222923, 147.46982483, 161.53685504, 176.95197338, - 192.61960738, 207.30533949, 220.27975359, 231.46642409 -] -OLD_MODEL = os.path.join(MODEL_PATH, 'sandia_performance_model-Tuscon.json') -ANNUAL_ENERGY = np.array(479083.75869040738) - - -def test_daterange(): - """ - Test date range. - """ - test_range = 12 - dates = UTIL_FORMULAS['f_daterange']( - 'HOURLY', TZ, dtstart=DTSTART, count=test_range - ) - dtstart_local = PST.localize(DTSTART) - for hour in xrange(test_range): - assert dates[hour] == dtstart_local + timedelta(hours=hour) - assert dates[hour].tzinfo.zone == TZ - return dates - - -def test_solarposition(): - """ - Test solar position algorithm. - """ - lat, lon = 38.0 * UREG.degrees, -122.0 * UREG.degrees - times = UTIL_FORMULAS['f_daterange']( - 'HOURLY', TZ, dtstart=(DTSTART + timedelta(hours=8)), count=9 - ) - cov = np.array([[0.0001, 0], [0, 0.0001]]) - solpos = IRRAD_FORMULAS['f_solpos'](times, lat, lon, __covariance__=cov) - assert len(solpos) == 4 - ze, az, cov, jac = solpos - assert ze.u == UREG.degree - assert az.u == UREG.degree - assert np.allclose(ze.m, ZENITH) - assert np.allclose(az.m, AZIMUTH) - return solpos - - -def test_rollup(): - """ - Test rollup. - """ - dates = UTIL_FORMULAS['f_daterange']('HOURLY', dtstart=DTSTART, count=8761) - ac_power = 1000. * np.sin(np.arange(12) * np.pi / 12.0) ** 2 - ac_power = np.pad(ac_power, [6, 6], 'constant') - ac_power = np.append(np.tile(ac_power, (365,)), [0]) * UREG.watt - energy, energy_times = UTIL_FORMULAS['f_energy'](ac_power, dates) - assert energy.units == UREG.Wh - monthly_energy = UTIL_FORMULAS['f_rollup'](energy, energy_times, 'MONTHLY') - assert np.allclose(monthly_energy[:12], MONTHLY_ENERGY) - return dates, ac_power, energy, energy_times, monthly_energy - - -def test_new_style(): - """ - Test new style Carousel model. - """ - m = sandia_perfmod_newstyle.NewSAPM() - assert isinstance(m, models.Model) - m.command('start') - annual_energy = np.sum(m.registries['outputs']['annual_energy'].m) - assert np.isclose(annual_energy, ANNUAL_ENERGY) - return m - - -def test_old_style(): - """ - Test old style Carousel model. 
- """ - m = sandia_performance_model.SAPM(OLD_MODEL) - assert isinstance(m, models.Model) - m.command('start') - annual_energy = np.sum(m.registries['outputs']['annual_energy'].m) - assert np.isclose(annual_energy, ANNUAL_ENERGY) - return m - - -if __name__ == "__main__": - results = test_rollup() - m_old = test_old_style() - m_new = test_new_style() From 4f0c41fe57548bf616efaad5e76ef7e6d6d18175 Mon Sep 17 00:00:00 2001 From: Python3pkg Date: Thu, 18 May 2017 10:50:15 -0700 Subject: [PATCH 3/3] Update travis-ci for python3 test --- .travis.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.travis.yml b/.travis.yml index 12580ef..f94e1aa 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,10 @@ language: python cache: pip sudo: required +python: + - "2.7" + - "3.6" + - "3.7-dev" # 3.7 development branch before_install: - sudo apt-get -qq update - sudo apt-get install -y gfortran